mirror of
https://github.com/Comfy-Org/ComfyUI_frontend.git
synced 2026-05-06 22:21:51 +00:00
Compare commits
23 Commits
feat/3410-
...
feat/Remot
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
09942a5b7f | ||
|
|
560e53c68f | ||
|
|
1999b7fba0 | ||
|
|
285421a87c | ||
|
|
5523df1aea | ||
|
|
65876c635d | ||
|
|
28c97d3687 | ||
|
|
04918360eb | ||
|
|
97c2a0d364 | ||
|
|
af70d88860 | ||
|
|
c955309b26 | ||
|
|
7abd9d12c8 | ||
|
|
dd9cb42fa1 | ||
|
|
ccd19d8695 | ||
|
|
809fba7b36 | ||
|
|
df2ae6f2d0 | ||
|
|
3c7781190a | ||
|
|
167a1e6a0c | ||
|
|
e4e1546458 | ||
|
|
c1954028d1 | ||
|
|
5cad2c952b | ||
|
|
e356addeb6 | ||
|
|
e831daae59 |
23
.github/actions/ashby-pull/action.yaml
vendored
Normal file
23
.github/actions/ashby-pull/action.yaml
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
name: Ashby Pull
|
||||
description: 'Refresh the apps/website Ashby roles snapshot from the Ashby job board API'
|
||||
inputs:
|
||||
api_key:
|
||||
description: 'Ashby API key (WEBSITE_ASHBY_API_KEY).'
|
||||
required: true
|
||||
job_board_name:
|
||||
description: 'Ashby job board name (WEBSITE_ASHBY_JOB_BOARD_NAME).'
|
||||
required: true
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
# Note: this action assumes the frontend repo is checked out at the workspace root.
|
||||
|
||||
- name: Setup frontend
|
||||
uses: ./.github/actions/setup-frontend
|
||||
|
||||
- name: Refresh Ashby snapshot
|
||||
shell: bash
|
||||
env:
|
||||
WEBSITE_ASHBY_API_KEY: ${{ inputs.api_key }}
|
||||
WEBSITE_ASHBY_JOB_BOARD_NAME: ${{ inputs.job_board_name }}
|
||||
run: pnpm --filter @comfyorg/website ashby:refresh-snapshot
|
||||
BIN
.github/pr-images/fe-237-before-after.png
vendored
Normal file
BIN
.github/pr-images/fe-237-before-after.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 44 KiB |
@@ -52,6 +52,9 @@ jobs:
|
||||
run: vercel pull --yes --environment=preview
|
||||
|
||||
- name: Build project artifacts
|
||||
env:
|
||||
WEBSITE_ASHBY_API_KEY: ${{ secrets.WEBSITE_ASHBY_API_KEY }}
|
||||
WEBSITE_ASHBY_JOB_BOARD_NAME: ${{ secrets.WEBSITE_ASHBY_JOB_BOARD_NAME }}
|
||||
run: vercel build
|
||||
|
||||
- name: Fetch head commit metadata
|
||||
@@ -146,6 +149,9 @@ jobs:
|
||||
run: vercel pull --yes --environment=production
|
||||
|
||||
- name: Build project artifacts
|
||||
env:
|
||||
WEBSITE_ASHBY_API_KEY: ${{ secrets.WEBSITE_ASHBY_API_KEY }}
|
||||
WEBSITE_ASHBY_JOB_BOARD_NAME: ${{ secrets.WEBSITE_ASHBY_JOB_BOARD_NAME }}
|
||||
run: vercel build --prod
|
||||
|
||||
- name: Deploy project artifacts to Vercel
|
||||
|
||||
3
.github/workflows/ci-website-build.yaml
vendored
3
.github/workflows/ci-website-build.yaml
vendored
@@ -36,4 +36,7 @@ jobs:
|
||||
uses: ./.github/actions/setup-frontend
|
||||
|
||||
- name: Build website
|
||||
env:
|
||||
WEBSITE_ASHBY_API_KEY: ${{ secrets.WEBSITE_ASHBY_API_KEY }}
|
||||
WEBSITE_ASHBY_JOB_BOARD_NAME: ${{ secrets.WEBSITE_ASHBY_JOB_BOARD_NAME }}
|
||||
run: pnpm --filter @comfyorg/website build
|
||||
|
||||
59
.github/workflows/release-website.yaml
vendored
Normal file
59
.github/workflows/release-website.yaml
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
# Description: Manual workflow to refresh the apps/website Ashby roles snapshot
|
||||
# and open a PR. Merging the PR triggers the existing Vercel website production
|
||||
# deploy via ci-vercel-website-preview.yaml.
|
||||
name: 'Release: Website'
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: release-website
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
refresh-snapshot:
|
||||
if: github.repository == 'Comfy-Org/ComfyUI_frontend'
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: main
|
||||
persist-credentials: false
|
||||
|
||||
- name: Refresh Ashby snapshot
|
||||
uses: ./.github/actions/ashby-pull
|
||||
with:
|
||||
api_key: ${{ secrets.WEBSITE_ASHBY_API_KEY }}
|
||||
job_board_name: ${{ secrets.WEBSITE_ASHBY_JOB_BOARD_NAME }}
|
||||
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8.1.0
|
||||
with:
|
||||
token: ${{ secrets.PR_GH_TOKEN }}
|
||||
commit-message: 'chore(website): refresh Ashby roles snapshot'
|
||||
title: 'chore(website): refresh Ashby roles snapshot'
|
||||
body: |
|
||||
Automated refresh of `apps/website/src/data/ashby-roles.snapshot.json`
|
||||
from the Ashby job board API.
|
||||
|
||||
**Flow:**
|
||||
1. `Release: Website` workflow ran (manual trigger).
|
||||
2. This PR opens with the regenerated snapshot.
|
||||
3. `CI: Vercel Website Preview` deploys a preview for review.
|
||||
4. Merging to `main` triggers the production Vercel deploy.
|
||||
|
||||
The snapshot fallback in `apps/website/src/utils/ashby.ts` remains
|
||||
intact: builds without `WEBSITE_ASHBY_API_KEY` continue to use the
|
||||
committed snapshot.
|
||||
|
||||
Triggered by workflow run `${{ github.run_id }}`.
|
||||
branch: chore/refresh-ashby-snapshot-${{ github.run_id }}
|
||||
base: main
|
||||
labels: |
|
||||
Release:Website
|
||||
delete-branch: true
|
||||
@@ -69,6 +69,50 @@ test.describe('Homepage @smoke', () => {
|
||||
).toBeVisible()
|
||||
})
|
||||
|
||||
test('CaseStudySpotlight CTA sizes to its content, not the column', async ({
|
||||
page
|
||||
}) => {
|
||||
const contentColumn = page.getByTestId('case-study-content')
|
||||
const cta = contentColumn.getByRole('link', {
|
||||
name: /see all case studies/i
|
||||
})
|
||||
|
||||
await cta.scrollIntoViewIfNeeded()
|
||||
await expect(cta).toBeVisible()
|
||||
|
||||
const [columnBox, ctaBox] = await Promise.all([
|
||||
contentColumn.boundingBox(),
|
||||
cta.boundingBox()
|
||||
])
|
||||
|
||||
expect(columnBox).not.toBeNull()
|
||||
expect(ctaBox).not.toBeNull()
|
||||
expect(ctaBox!.width).toBeLessThan(columnBox!.width * 0.7)
|
||||
})
|
||||
|
||||
test('CaseStudySpotlight CTA has breathing room above it on mobile @mobile', async ({
|
||||
page
|
||||
}) => {
|
||||
const contentColumn = page.getByTestId('case-study-content')
|
||||
const subheading = contentColumn.getByText(
|
||||
/Videos & case studies from teams/i
|
||||
)
|
||||
const cta = contentColumn.getByRole('link', {
|
||||
name: /see all case studies/i
|
||||
})
|
||||
|
||||
await cta.scrollIntoViewIfNeeded()
|
||||
|
||||
const [subBox, ctaBox] = await Promise.all([
|
||||
subheading.boundingBox(),
|
||||
cta.boundingBox()
|
||||
])
|
||||
|
||||
expect(subBox).not.toBeNull()
|
||||
expect(ctaBox).not.toBeNull()
|
||||
expect(ctaBox!.y - (subBox!.y + subBox!.height)).toBeGreaterThanOrEqual(24)
|
||||
})
|
||||
|
||||
test('BuildWhatSection is visible', async ({ page }) => {
|
||||
// "DOESN'T EXIST" is the actual badge text rendered in the Build What section
|
||||
await expect(page.getByText("DOESN'T EXIST")).toBeVisible()
|
||||
|
||||
58
apps/website/public/llms.txt
Normal file
58
apps/website/public/llms.txt
Normal file
@@ -0,0 +1,58 @@
|
||||
# Comfy
|
||||
|
||||
> Comfy is the AI creation engine for visual professionals who demand control over every model, every parameter, and every output. Built around ComfyUI — the open-source node-graph runtime with 60,000+ community nodes and thousands of shared workflows — Comfy ships as a free local app, a managed cloud, an API, and an enterprise platform.
|
||||
|
||||
The Comfy ecosystem spans four surfaces:
|
||||
|
||||
- **ComfyUI (local)** — the open-source node-graph runtime that runs models on your own hardware.
|
||||
- **Comfy Cloud** — managed ComfyUI in the browser, with hosted models and storage.
|
||||
- **Comfy API** — a REST API for triggering workflows from your own apps and pipelines.
|
||||
- **Comfy Enterprise** — single-tenant deployments, BYO keys, data ownership, and orchestration for teams.
|
||||
|
||||
Studios building with Comfy include Series Entertainment, Moment Factory, Open Story Movement, and Ubisoft (La Forge). Use cases concentrate in VFX & animation, advertising & creative studios, gaming, and eCommerce/fashion.
|
||||
|
||||
## Product
|
||||
|
||||
- [Homepage](https://comfy.org/): Overview of Comfy and the four product surfaces (Local, Cloud, API, Enterprise).
|
||||
- [Download Comfy (Local)](https://comfy.org/download/): Free desktop app for macOS, Windows, and Linux — runs ComfyUI on your own GPU.
|
||||
- [Comfy Cloud](https://comfy.org/cloud/): Managed ComfyUI in the browser with hosted models and storage; no local install required.
|
||||
- [Comfy Cloud Pricing](https://comfy.org/cloud/pricing/): Plans and per-credit pricing for individuals and teams using Comfy Cloud.
|
||||
- [Comfy API](https://comfy.org/api/): REST API for triggering ComfyUI workflows programmatically from external apps.
|
||||
- [Comfy Enterprise](https://comfy.org/cloud/enterprise/): Single-tenant ComfyUI deployments with BYO keys, orchestration, and data-ownership guarantees.
|
||||
|
||||
## Workflows and Gallery
|
||||
|
||||
- [Workflow Gallery](https://comfy.org/gallery/): Curated showcase of ComfyUI outputs — images, video, and 3D — produced by the community.
|
||||
- [Community Workflows](https://www.comfy.org/workflows/): Browseable library of community-shared ComfyUI workflows you can load and remix.
|
||||
|
||||
## Customers and Case Studies
|
||||
|
||||
- [Customer Stories](https://comfy.org/customers/): Index of named customers and how they use ComfyUI in production.
|
||||
- [Series Entertainment](https://comfy.org/customers/series-entertainment/): How Series Entertainment rebuilt game and video production around ComfyUI.
|
||||
- [Moment Factory](https://comfy.org/customers/moment-factory/): Architectural-scale 3D projection mapping reimagined with ComfyUI at Moment Factory.
|
||||
- [Ubisoft — Chord](https://comfy.org/customers/ubisoft-chord/): Ubisoft La Forge open-sourcing the Chord model and its ComfyUI integration.
|
||||
- [Open Story Movement](https://comfy.org/customers/open-story-movement/): How an open-source movement around AI storytelling builds on ComfyUI.
|
||||
|
||||
## Developers and Documentation
|
||||
|
||||
- [ComfyUI Docs](https://docs.comfy.org/): Official documentation for installing, configuring, and extending ComfyUI.
|
||||
- [ComfyUI on GitHub](https://github.com/comfyanonymous/ComfyUI): Source repository for the open-source ComfyUI runtime.
|
||||
- [Comfy-Org on GitHub](https://github.com/Comfy-Org): Organization-wide repositories — frontend, registry, manager, docs, and tooling.
|
||||
- [Comfy Registry](https://registry.comfy.org/): Public registry of ComfyUI custom nodes and extensions, with versioning and search.
|
||||
|
||||
## Company
|
||||
|
||||
- [About Comfy](https://comfy.org/about/): Company background, mission, and the team behind ComfyUI.
|
||||
- [Careers](https://comfy.org/careers/): Open roles across engineering, design, product, and go-to-market.
|
||||
- [Contact](https://comfy.org/contact/): Sales, partnership, and general contact form.
|
||||
- [Blog](https://blog.comfy.org/): Product announcements, technical deep-dives, and customer stories.
|
||||
- [Privacy Policy](https://comfy.org/privacy-policy/): How Comfy collects, uses, and protects personal information.
|
||||
- [Terms of Service](https://comfy.org/terms-of-service/): Terms governing use of ComfyUI and related Comfy services.
|
||||
|
||||
## Optional
|
||||
|
||||
- [简体中文 / Chinese homepage](https://comfy.org/zh-CN/): Simplified Chinese localization of the main site.
|
||||
- [Series Entertainment — long-form case study](https://comfy.org/cloud/enterprise-case-studies/how-series-entertainment-rebuilt-game-and-video-production-with-comfyui): Extended write-up of the Series Entertainment deployment.
|
||||
- [Moment Factory — long-form case study](https://comfy.org/cloud/enterprise-case-studies/comfyui-at-architectural-scale-how-moment-factory-reimagined-3d-projection-mapping): Extended write-up of Moment Factory's projection-mapping pipeline.
|
||||
- [Ubisoft Chord announcement (blog)](https://blog.comfy.org/p/ubisoft-open-sources-the-chord-model): Original blog post announcing Ubisoft's open-source Chord model.
|
||||
- [Open-source storytelling (blog)](https://blog.comfy.org/p/how-open-source-is-fueling-the-open): Blog post on how open source is fueling the Open Story Movement.
|
||||
@@ -1,4 +1,33 @@
|
||||
User-agent: *
|
||||
Allow: /
|
||||
# robots.txt for comfy.org
|
||||
# Open to all crawlers — including AI/LLM bots — for maximum visibility
|
||||
# in AI-powered search, chat-based answer engines, and traditional search.
|
||||
# Granular UAs are listed explicitly to signal intent; rules are shared
|
||||
# via stacked user-agent records (RFC 9309 §2.2).
|
||||
|
||||
Sitemap: https://comfy.org/sitemap-0.xml
|
||||
User-agent: *
|
||||
User-agent: Googlebot
|
||||
User-agent: Bingbot
|
||||
User-agent: DuckDuckBot
|
||||
User-agent: GPTBot
|
||||
User-agent: ChatGPT-User
|
||||
User-agent: OAI-SearchBot
|
||||
User-agent: Google-Extended
|
||||
User-agent: ClaudeBot
|
||||
User-agent: Claude-Web
|
||||
User-agent: anthropic-ai
|
||||
User-agent: PerplexityBot
|
||||
User-agent: Perplexity-User
|
||||
User-agent: Applebot
|
||||
User-agent: Applebot-Extended
|
||||
User-agent: Bytespider
|
||||
User-agent: Amazonbot
|
||||
User-agent: CCBot
|
||||
User-agent: Meta-ExternalAgent
|
||||
User-agent: Meta-ExternalFetcher
|
||||
User-agent: Diffbot
|
||||
Allow: /
|
||||
Disallow: /_astro/
|
||||
Disallow: /_website/
|
||||
Disallow: /_vercel/
|
||||
|
||||
Sitemap: https://comfy.org/sitemap-index.xml
|
||||
|
||||
83
apps/website/scripts/README.md
Normal file
83
apps/website/scripts/README.md
Normal file
@@ -0,0 +1,83 @@
|
||||
# Website Scripts
|
||||
|
||||
## `refresh-ashby-snapshot.ts`
|
||||
|
||||
Pulls the latest job postings from Ashby and writes
|
||||
`src/data/ashby-roles.snapshot.json`. Invoked by the `Release: Website`
|
||||
GitHub Actions workflow; also runnable locally via
|
||||
`pnpm --filter @comfyorg/website ashby:refresh-snapshot`.
|
||||
|
||||
## `process-videos.sh`
|
||||
|
||||
Generates multi-resolution VP9/WebM + H.264/MP4 variants and a poster
|
||||
frame for marketing videos using `ffmpeg`. Run **locally** before
|
||||
uploading the outputs to `media.comfy.org`; this is not wired into CI.
|
||||
|
||||
```sh
|
||||
apps/website/scripts/process-videos.sh \
|
||||
./video-sources \
|
||||
./dist/videos \
|
||||
"640 960 1280 1920"
|
||||
```
|
||||
|
||||
### Output
|
||||
|
||||
For each source video at `./video-sources/foo.mp4`, you get:
|
||||
|
||||
```text
|
||||
foo-640.webm foo-640.mp4
|
||||
foo-960.webm foo-960.mp4
|
||||
foo-1280.webm foo-1280.mp4
|
||||
foo-1920.webm foo-1920.mp4
|
||||
foo-poster.jpg
|
||||
```
|
||||
|
||||
The naming convention is enforced by `buildVideoSources()` in
|
||||
`src/utils/video.ts`, which the `<SiteVideo>` Vue component uses to
|
||||
emit `<source>` URLs.
|
||||
|
||||
### Pairing with `<SiteVideo>`
|
||||
|
||||
Once the assets are uploaded, render them with:
|
||||
|
||||
```vue
|
||||
<SiteVideo
|
||||
name="foo"
|
||||
base-url="https://media.comfy.org/website/marketing"
|
||||
:width="1280"
|
||||
:formats="['webm', 'mp4']"
|
||||
poster="https://media.comfy.org/website/marketing/foo-poster.jpg"
|
||||
autoplay
|
||||
loop
|
||||
/>
|
||||
```
|
||||
|
||||
### `<SiteVideo>` vs `<VideoPlayer>`
|
||||
|
||||
- **`SiteVideo`** — lightweight multi-source `<video>` for decorative or
|
||||
autoplay marketing clips. No custom controls, no captions UI.
|
||||
- **`VideoPlayer`** — full-featured player with custom scrubber, mute,
|
||||
fullscreen, and caption toggles. Use this for content with subtitles or
|
||||
user-driven playback.
|
||||
|
||||
If you need both responsive sources and the rich `VideoPlayer` chrome, the
|
||||
two are not yet combined; either pick one or extend `VideoPlayer` to accept
|
||||
a source list.
|
||||
|
||||
### Encoder choices
|
||||
|
||||
- **VP9/WebM** at CRF 32 — preferred by Chrome and Firefox; smaller files.
|
||||
- **H.264/MP4** at CRF 23, High profile, `+faststart` — universal fallback,
|
||||
required for Safari iOS.
|
||||
- **Poster JPG** at q4 — extracted from t=1s when the clip is long enough,
|
||||
otherwise t=0; scaled to 1280w. Use this as the `poster` attribute so
|
||||
the video shows something while loading.
|
||||
|
||||
### Why a single resolution per video
|
||||
|
||||
`<source media="...">` inside `<video>` is unreliable across browsers
|
||||
(Safari ignores it). The simplest correct strategy is to ship one
|
||||
well-sized resolution and let CSS scale it down on smaller viewports.
|
||||
The script generates multiple widths so you can pick a different one
|
||||
per page (e.g. 1280w for a hero, 640w for a thumbnail), or wire up
|
||||
JavaScript-based selection later if metrics demand it.
|
||||
110
apps/website/scripts/process-videos.sh
Executable file
110
apps/website/scripts/process-videos.sh
Executable file
@@ -0,0 +1,110 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Generate multi-resolution VP9/WebM + H.264/MP4 variants and a poster frame
|
||||
# for every source video in a given directory. Intended to be run locally
|
||||
# before uploading the outputs to media.comfy.org.
|
||||
#
|
||||
# Usage:
|
||||
# apps/website/scripts/process-videos.sh <input-dir> <output-dir> [widths]
|
||||
#
|
||||
# Example:
|
||||
# apps/website/scripts/process-videos.sh \
|
||||
# ./video-sources \
|
||||
# ./dist/videos \
|
||||
# "640 960 1280 1920"
|
||||
#
|
||||
# Defaults to widths "1280" if omitted.
|
||||
#
|
||||
# Output naming matches buildVideoSources() in src/utils/video.ts:
|
||||
# <name>-<width>.webm
|
||||
# <name>-<width>.mp4
|
||||
# <name>-poster.jpg (single 1280w poster, suitable for SiteVideo)
|
||||
#
|
||||
# Requires ffmpeg and ffprobe on PATH. Tested with ffmpeg 6.x and 7.x.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
if [[ $# -lt 2 ]]; then
|
||||
cat <<USAGE >&2
|
||||
Usage: $0 <input-dir> <output-dir> [widths]
|
||||
widths: space-separated list, e.g. "640 1280 1920" (default: "1280")
|
||||
USAGE
|
||||
exit 64
|
||||
fi
|
||||
|
||||
input_dir=$1
|
||||
output_dir=$2
|
||||
widths=${3:-1280}
|
||||
|
||||
for tool in ffmpeg ffprobe; do
|
||||
if ! command -v "$tool" >/dev/null 2>&1; then
|
||||
echo "error: $tool not found on PATH" >&2
|
||||
exit 127
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ ! -d $input_dir ]]; then
|
||||
echo "error: input dir not found: $input_dir" >&2
|
||||
exit 66
|
||||
fi
|
||||
|
||||
mkdir -p "$output_dir"
|
||||
|
||||
shopt -s nullglob nocaseglob
|
||||
sources=("$input_dir"/*.{mp4,mov,webm,mkv})
|
||||
shopt -u nullglob nocaseglob
|
||||
|
||||
if [[ ${#sources[@]} -eq 0 ]]; then
|
||||
echo "error: no source videos in $input_dir (looked for .mp4 .mov .webm .mkv)" >&2
|
||||
exit 66
|
||||
fi
|
||||
|
||||
for src in "${sources[@]}"; do
|
||||
name=$(basename "$src")
|
||||
name=${name%.*}
|
||||
echo "==> $name"
|
||||
|
||||
for w in $widths; do
|
||||
webm_out="$output_dir/${name}-${w}.webm"
|
||||
mp4_out="$output_dir/${name}-${w}.mp4"
|
||||
|
||||
echo " encoding ${w}w VP9/WebM -> $webm_out"
|
||||
ffmpeg -y -hide_banner -loglevel error \
|
||||
-i "$src" \
|
||||
-vf "scale=${w}:-2:flags=lanczos" \
|
||||
-c:v libvpx-vp9 -b:v 0 -crf 32 -row-mt 1 -tile-columns 2 \
|
||||
-c:a libopus -b:a 96k \
|
||||
-f webm "$webm_out"
|
||||
|
||||
echo " encoding ${w}w H.264/MP4 -> $mp4_out"
|
||||
ffmpeg -y -hide_banner -loglevel error \
|
||||
-i "$src" \
|
||||
-vf "scale=${w}:-2:flags=lanczos" \
|
||||
-c:v libx264 -crf 23 -preset slow -profile:v high -pix_fmt yuv420p \
|
||||
-c:a aac -b:a 128k \
|
||||
-movflags +faststart \
|
||||
"$mp4_out"
|
||||
done
|
||||
|
||||
poster_out="$output_dir/${name}-poster.jpg"
|
||||
duration_raw=$(ffprobe -v error -show_entries format=duration \
|
||||
-of default=noprint_wrappers=1:nokey=1 "$src" 2>/dev/null || true)
|
||||
if [[ $duration_raw =~ ^[0-9]+([.][0-9]+)?$ ]]; then
|
||||
duration="$duration_raw"
|
||||
else
|
||||
duration=0
|
||||
fi
|
||||
if awk -v d="$duration" 'BEGIN { exit !(d >= 1.0) }'; then
|
||||
poster_seek=1
|
||||
else
|
||||
poster_seek=0
|
||||
fi
|
||||
echo " extracting poster (t=${poster_seek}s) -> $poster_out"
|
||||
ffmpeg -y -hide_banner -loglevel error \
|
||||
-ss "$poster_seek" -i "$src" \
|
||||
-vframes 1 -vf "scale=1280:-2:flags=lanczos" \
|
||||
-q:v 4 \
|
||||
"$poster_out"
|
||||
done
|
||||
|
||||
echo "done. upload contents of $output_dir to media.comfy.org."
|
||||
51
apps/website/src/assets/marketing/README.md
Normal file
51
apps/website/src/assets/marketing/README.md
Normal file
@@ -0,0 +1,51 @@
|
||||
# Marketing Assets
|
||||
|
||||
Source images committed here are processed by Astro at build time and emitted
|
||||
as multiple formats (AVIF, WebP) at multiple widths (640w, 960w, 1280w, 1920w).
|
||||
|
||||
## Usage
|
||||
|
||||
Drop a high-resolution source image (PNG or JPG) here, then render it with
|
||||
Astro's built-in `<Picture>` component plus the shared defaults:
|
||||
|
||||
```astro
|
||||
---
|
||||
import { Picture } from 'astro:assets'
|
||||
import {
|
||||
MARKETING_FORMATS,
|
||||
MARKETING_WIDTHS
|
||||
} from '../utils/marketingImage'
|
||||
import hero from '../assets/marketing/hero.png'
|
||||
---
|
||||
<Picture
|
||||
src={hero}
|
||||
alt="ComfyUI workflow preview"
|
||||
formats={[...MARKETING_FORMATS]}
|
||||
widths={[...MARKETING_WIDTHS]}
|
||||
sizes="(max-width: 768px) 100vw, 50vw"
|
||||
/>
|
||||
```
|
||||
|
||||
The component generates a `<picture>` element with `<source>` tags for AVIF
|
||||
and WebP, plus an `<img>` fallback. Output files are hashed and emitted under
|
||||
`dist/_website/` for long-term caching.
|
||||
|
||||
A custom Astro wrapper component is intentionally not provided: Astro's
|
||||
discriminated union `LocalImageProps | RemoteImageProps` for `<Picture>` makes
|
||||
a thin wrapper that mutates `widths` / `formats` impractical to type safely
|
||||
without `as` casts. The shared constants give us the same consistency benefit
|
||||
without that cost.
|
||||
|
||||
## When to use this vs. `media.comfy.org`
|
||||
|
||||
- **Use `src/assets/marketing/`** for static marketing images that are part of
|
||||
page content (hero shots, product imagery, illustrations). Build-time
|
||||
processing gives you AVIF/WebP variants automatically.
|
||||
- **Use `media.comfy.org`** for video content, large/changing image libraries
|
||||
(gallery), and anything shared across properties.
|
||||
|
||||
## Source image guidelines
|
||||
|
||||
- Provide the largest size you'll ever need (≥1920px wide).
|
||||
- PNG for screenshots/illustrations with sharp edges; JPG for photographs.
|
||||
- Astro will downscale; it will not upscale. Always supply at least 1920w.
|
||||
68
apps/website/src/components/common/SiteVideo.vue
Normal file
68
apps/website/src/components/common/SiteVideo.vue
Normal file
@@ -0,0 +1,68 @@
|
||||
<script setup lang="ts">
|
||||
import { cn } from '@comfyorg/tailwind-utils'
|
||||
import { computed } from 'vue'
|
||||
|
||||
import { buildVideoSources, videoKey } from '../../utils/video'
|
||||
import type { VideoFormat } from '../../utils/video'
|
||||
|
||||
const {
|
||||
name,
|
||||
baseUrl,
|
||||
width = 1280,
|
||||
formats = ['webm', 'mp4'],
|
||||
poster,
|
||||
alt,
|
||||
autoplay = false,
|
||||
loop = false,
|
||||
muted = autoplay,
|
||||
controls = false,
|
||||
preload = autoplay ? 'auto' : 'metadata',
|
||||
containerClass,
|
||||
videoClass
|
||||
} = defineProps<{
|
||||
name: string
|
||||
baseUrl: string
|
||||
width?: number
|
||||
formats?: VideoFormat[]
|
||||
poster?: string
|
||||
alt?: string
|
||||
autoplay?: boolean
|
||||
loop?: boolean
|
||||
muted?: boolean
|
||||
controls?: boolean
|
||||
preload?: 'auto' | 'metadata' | 'none'
|
||||
containerClass?: string
|
||||
videoClass?: string
|
||||
}>()
|
||||
|
||||
const sources = computed(() =>
|
||||
buildVideoSources({ name, baseUrl, width, formats })
|
||||
)
|
||||
const remountKey = computed(() => videoKey(sources.value))
|
||||
const decorative = computed(() => !alt && !controls)
|
||||
</script>
|
||||
|
||||
<template>
|
||||
<div :class="cn('relative', containerClass)">
|
||||
<video
|
||||
:key="remountKey"
|
||||
:class="cn('size-full', videoClass)"
|
||||
:poster
|
||||
:preload
|
||||
:autoplay
|
||||
:loop
|
||||
:muted
|
||||
:controls
|
||||
:aria-label="alt"
|
||||
:aria-hidden="decorative ? true : undefined"
|
||||
playsinline
|
||||
>
|
||||
<source
|
||||
v-for="source in sources"
|
||||
:key="source.src"
|
||||
:src="source.src"
|
||||
:type="source.type"
|
||||
/>
|
||||
</video>
|
||||
</div>
|
||||
</template>
|
||||
@@ -35,7 +35,10 @@ const routes = getRoutes(locale)
|
||||
</div>
|
||||
|
||||
<!-- Right: content -->
|
||||
<div class="flex flex-col justify-between p-6 lg:flex-1">
|
||||
<div
|
||||
data-testid="case-study-content"
|
||||
class="flex flex-col justify-between p-6 lg:flex-1"
|
||||
>
|
||||
<div class="flex flex-col gap-8">
|
||||
<p
|
||||
class="text-primary-comfy-yellow text-sm font-bold tracking-widest uppercase"
|
||||
@@ -52,12 +55,8 @@ const routes = getRoutes(locale)
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div class="flex flex-col gap-3 sm:flex-row">
|
||||
<BrandButton
|
||||
:href="routes.customers"
|
||||
variant="outline"
|
||||
class="flex-1 text-center"
|
||||
>
|
||||
<div class="mt-8 flex flex-col items-start gap-3 sm:flex-row lg:mt-0">
|
||||
<BrandButton :href="routes.customers" variant="outline">
|
||||
{{ t('caseStudy.seeAll', locale) }}
|
||||
</BrandButton>
|
||||
</div>
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
<script setup lang="ts">
|
||||
import type { Locale } from '../../i18n/translations'
|
||||
import { externalLinks } from '../../config/routes'
|
||||
import { t } from '../../i18n/translations'
|
||||
import BrandButton from '../common/BrandButton.vue'
|
||||
|
||||
const { locale = 'en' } = defineProps<{ locale?: Locale }>()
|
||||
</script>
|
||||
@@ -32,6 +34,15 @@ const { locale = 'en' } = defineProps<{ locale?: Locale }>()
|
||||
>
|
||||
{{ t('hero.subtitle', locale) }}
|
||||
</p>
|
||||
|
||||
<BrandButton
|
||||
:href="externalLinks.workflows"
|
||||
variant="outline"
|
||||
size="lg"
|
||||
class="mt-8 w-full p-4 uppercase lg:w-auto lg:min-w-60"
|
||||
>
|
||||
{{ t('hero.runFirstWorkflow', locale) }}
|
||||
</BrandButton>
|
||||
</div>
|
||||
</section>
|
||||
</template>
|
||||
|
||||
@@ -101,17 +101,9 @@ const features: IncludedFeature[] = [
|
||||
class="mt-0.5 size-4 shrink-0"
|
||||
aria-hidden="true"
|
||||
/>
|
||||
<div>
|
||||
<p class="text-primary-comfy-canvas text-sm font-medium">
|
||||
{{ t(feature.titleKey, locale) }}
|
||||
</p>
|
||||
<span
|
||||
v-if="feature.isComingSoon"
|
||||
class="text-primary-comfy-yellow mt-1 inline-block text-xs"
|
||||
>
|
||||
{{ t('pricing.included.comingSoon', locale) }}
|
||||
</span>
|
||||
</div>
|
||||
<p class="text-primary-comfy-canvas text-sm font-medium">
|
||||
{{ t(feature.titleKey, locale) }}
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<!-- Description -->
|
||||
|
||||
@@ -52,6 +52,15 @@ export const customerStories: CustomerStory[] = [
|
||||
detailPrefix: 'customers.detail.ubisoft-chord',
|
||||
readMoreHref:
|
||||
'https://blog.comfy.org/p/ubisoft-open-sources-the-chord-model'
|
||||
},
|
||||
{
|
||||
slug: 'groove-jones',
|
||||
image:
|
||||
'https://media.comfy.org/website/customers/groove-jones/crocs-nfl-dicks-sporting-goods-fooh.webp',
|
||||
category: 'customers.story.groove-jones.category',
|
||||
title: 'customers.story.groove-jones.title',
|
||||
body: 'customers.story.groove-jones.body',
|
||||
detailPrefix: 'customers.detail.groove-jones'
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
@@ -1,24 +1,10 @@
|
||||
{
|
||||
"fetchedAt": "2026-04-24T18:59:03.989Z",
|
||||
"fetchedAt": "2026-05-02T20:15:18.321Z",
|
||||
"departments": [
|
||||
{
|
||||
"name": "DESIGN",
|
||||
"key": "design",
|
||||
"roles": [
|
||||
{
|
||||
"id": "4c5d6afb78652df7",
|
||||
"title": "Freelance Motion Designer",
|
||||
"department": "Design",
|
||||
"location": "San Francisco",
|
||||
"applyUrl": "https://jobs.ashbyhq.com/comfy-org/a7ccc2b4-4d9d-4e04-b39c-28a711995b5b/application"
|
||||
},
|
||||
{
|
||||
"id": "0f5256cf302e552b",
|
||||
"title": "Creative Artist",
|
||||
"department": "Design",
|
||||
"location": "San Francisco",
|
||||
"applyUrl": "https://jobs.ashbyhq.com/comfy-org/19ba10aa-4961-45e8-8473-66a8a7a8079d/application"
|
||||
},
|
||||
{
|
||||
"id": "e915f2c78b17f93b",
|
||||
"title": "Senior Product Designer",
|
||||
@@ -33,13 +19,6 @@
|
||||
"location": "San Francisco",
|
||||
"applyUrl": "https://jobs.ashbyhq.com/comfy-org/abc787b9-ad85-421c-8218-debd23bea096/application"
|
||||
},
|
||||
{
|
||||
"id": "5746486d87874937",
|
||||
"title": "Graphic Designer",
|
||||
"department": "Design",
|
||||
"location": "San Francisco",
|
||||
"applyUrl": "https://jobs.ashbyhq.com/comfy-org/49fa0b07-3fa1-4a3a-b2c6-d2cc684ad63f/application"
|
||||
},
|
||||
{
|
||||
"id": "547b6ba622c800a5",
|
||||
"title": "Senior Product Designer - Craft",
|
||||
@@ -115,6 +94,13 @@
|
||||
"department": "Engineering",
|
||||
"location": "San Francisco",
|
||||
"applyUrl": "https://jobs.ashbyhq.com/comfy-org/9e4b9029-c3e9-436b-82c4-a1a9f1b8c16e/application"
|
||||
},
|
||||
{
|
||||
"id": "2eb53e8943cc9396",
|
||||
"title": "Growth Engineer",
|
||||
"department": "Engineering",
|
||||
"location": "San Francisco",
|
||||
"applyUrl": "https://jobs.ashbyhq.com/comfy-org/f1fdde76-84ae-48c1-b0f9-9654dd8e7de5/application"
|
||||
}
|
||||
]
|
||||
},
|
||||
@@ -122,6 +108,27 @@
|
||||
"name": "MARKETING",
|
||||
"key": "marketing",
|
||||
"roles": [
|
||||
{
|
||||
"id": "4c5d6afb78652df7",
|
||||
"title": "Freelance Motion Designer",
|
||||
"department": "Marketing",
|
||||
"location": "San Francisco",
|
||||
"applyUrl": "https://jobs.ashbyhq.com/comfy-org/a7ccc2b4-4d9d-4e04-b39c-28a711995b5b/application"
|
||||
},
|
||||
{
|
||||
"id": "0f5256cf302e552b",
|
||||
"title": "Creative Artist",
|
||||
"department": "Marketing",
|
||||
"location": "San Francisco",
|
||||
"applyUrl": "https://jobs.ashbyhq.com/comfy-org/19ba10aa-4961-45e8-8473-66a8a7a8079d/application"
|
||||
},
|
||||
{
|
||||
"id": "5746486d87874937",
|
||||
"title": "Graphic Designer",
|
||||
"department": "Marketing",
|
||||
"location": "San Francisco",
|
||||
"applyUrl": "https://jobs.ashbyhq.com/comfy-org/49fa0b07-3fa1-4a3a-b2c6-d2cc684ad63f/application"
|
||||
},
|
||||
{
|
||||
"id": "b5803a0d4785d406",
|
||||
"title": "Lifecycle Growth Marketer",
|
||||
@@ -144,7 +151,7 @@
|
||||
"roles": [
|
||||
{
|
||||
"id": "ec68ae44dd5943c9",
|
||||
"title": "Senior Technical Recruiter",
|
||||
"title": "Talent Lead",
|
||||
"department": "Operations",
|
||||
"location": "San Francisco",
|
||||
"applyUrl": "https://jobs.ashbyhq.com/comfy-org/d5008532-c45d-46e6-ba2c-20489d364362/application"
|
||||
|
||||
@@ -11,6 +11,10 @@ const translations = {
|
||||
'zh-CN':
|
||||
'Comfy 是面向专业视觉人士的 AI 创作引擎。您可以精确掌控每个模型、每个参数和每个输出。'
|
||||
},
|
||||
'hero.runFirstWorkflow': {
|
||||
en: 'Run your first workflow',
|
||||
'zh-CN': '运行你的第一个工作流'
|
||||
},
|
||||
|
||||
// ProductShowcaseSection
|
||||
'showcase.subtitle1': {
|
||||
@@ -910,9 +914,9 @@ const translations = {
|
||||
'zh-CN': '我应该选择 Comfy Cloud 还是本地 ComfyUI(自托管)?'
|
||||
},
|
||||
'cloud.faq.3.a': {
|
||||
en: "Comfy Cloud (beta) has zero setup, is easy to share with your team, and is faster than most GPUs you can run on a desktop workstation. You can immediately run the best models and workflows from the community on Comfy Cloud.\nLocal ComfyUI is infinitely customizable, works offline, and you don't need to worry about queue times. However, depending on what you want to create, you might need to have a good GPU and some amount of technical knowledge to install community-created custom nodes.",
|
||||
en: "Comfy Cloud has zero setup, is easy to share with your team, and is faster than most GPUs you can run on a desktop workstation. You can immediately run the best models and workflows from the community on Comfy Cloud.\nLocal ComfyUI is infinitely customizable, works offline, and you don't need to worry about queue times. However, depending on what you want to create, you might need to have a good GPU and some amount of technical knowledge to install community-created custom nodes.",
|
||||
'zh-CN':
|
||||
'Comfy Cloud(测试版)无需任何设置,方便与团队共享,比大多数桌面工作站 GPU 更快。您可以立即在 Comfy Cloud 上运行社区中最好的模型和工作流。\n本地 ComfyUI 可以无限定制,支持离线工作,无需担心排队时间。但根据您的创作需求,可能需要一块好的 GPU 以及一定的技术知识来安装社区创建的自定义节点。'
|
||||
'Comfy Cloud 无需任何设置,方便与团队共享,比大多数桌面工作站 GPU 更快。您可以立即在 Comfy Cloud 上运行社区中最好的模型和工作流。\n本地 ComfyUI 可以无限定制,支持离线工作,无需担心排队时间。但根据您的创作需求,可能需要一块好的 GPU 以及一定的技术知识来安装社区创建的自定义节点。'
|
||||
},
|
||||
'cloud.faq.4.q': {
|
||||
en: 'Do I need a GPU or a strong computer to use Comfy Cloud?',
|
||||
@@ -1276,10 +1280,6 @@ const translations = {
|
||||
en: 'Run multiple workflows in parallel to speed up your pipeline.',
|
||||
'zh-CN': '并行运行多个工作流,加速你的流程。'
|
||||
},
|
||||
'pricing.included.comingSoon': {
|
||||
en: 'coming soon',
|
||||
'zh-CN': '即将推出'
|
||||
},
|
||||
|
||||
// VideoPlayer
|
||||
'player.play': { en: 'Play', 'zh-CN': '播放' },
|
||||
@@ -2243,6 +2243,20 @@ const translations = {
|
||||
'zh-CN':
|
||||
'育碧 La Forge 开源了 CHORD PBR 材质估算模型及 ComfyUI 自定义节点,为 AAA 游戏制作实现了端到端的纹理生成工作流。'
|
||||
},
|
||||
'customers.story.groove-jones.category': {
|
||||
en: 'CASE STUDY',
|
||||
'zh-CN': '案例研究'
|
||||
},
|
||||
'customers.story.groove-jones.title': {
|
||||
en: "How Groove Jones Delivered a Holiday FOOH Campaign for Dick's Sporting Goods with Comfy",
|
||||
'zh-CN':
|
||||
"Groove Jones 如何借助 Comfy 为 Dick's Sporting Goods 打造节日 FOOH 营销"
|
||||
},
|
||||
'customers.story.groove-jones.body': {
|
||||
en: 'Groove Jones, a Dallas-based creative studio, used Comfy to deliver a hyper-realistic FOOH holiday campaign for the Crocs x NFL collection on a fast-approaching deadline.',
|
||||
'zh-CN':
|
||||
'达拉斯创意工作室 Groove Jones 借助 Comfy,在紧迫的节日档期内为 Crocs x NFL 联名系列交付了超写实的 FOOH 营销内容。'
|
||||
},
|
||||
'customers.story.readMore': {
|
||||
en: 'READ MORE ON THIS TOPIC',
|
||||
'zh-CN': '阅读更多相关内容'
|
||||
@@ -3276,6 +3290,227 @@ const translations = {
|
||||
'zh-CN': 'ComfyUI 博客'
|
||||
},
|
||||
|
||||
// Customer Detail: Groove Jones
|
||||
// Topic 1: Intro
|
||||
'customers.detail.groove-jones.topic-1.label': {
|
||||
en: 'INTRO',
|
||||
'zh-CN': '简介'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-1.block.0': {
|
||||
en: 'Groove Jones, a Dallas-based creative studio, builds AI-driven campaigns and immersive experiences for major brands where photoreal polish, creative ambition, and social-ready speed all have to land together. As their work expanded across AI Video, AR, VR, and WebGL for clients like Crocs, the NFL, and Dick\u2019s Sporting Goods, they faced a recurring challenge: delivering feature-film-quality VFX on commercial timelines and budgets.',
|
||||
'zh-CN':
|
||||
'位于达拉斯的创意工作室 Groove Jones,为众多大牌客户打造由 AI 驱动的营销活动和沉浸式体验,需要同时兼顾照片级的精细度、创意野心,以及适配社交媒体的交付速度。随着他们为 Crocs、NFL 和 Dick\u2019s Sporting Goods 等客户的工作扩展到 AI 视频、AR、VR 和 WebGL,他们反复遇到同一个挑战:用商业项目的工期和预算,交付电影级的 VFX 质量。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-1.block.1': {
|
||||
en: 'For the Crocs x NFL collection holiday launch, that challenge came to a head. The brief called for hyper-realistic video of giant NFL-licensed Crocs parachuting into real Dick\u2019s Sporting Goods parking lots, across multiple locations, delivered on a fast-approaching holiday deadline. A live-action shoot plus a traditional CG pipeline was off the table.',
|
||||
'zh-CN':
|
||||
'在 Crocs x NFL 联名系列的节日上市项目中,这个挑战被推到了极致。Brief 要求制作超写实视频:巨型 NFL 授权 Crocs 鞋款跳伞落入多个真实的 Dick\u2019s Sporting Goods 停车场,并要在紧迫的节日档期前交付。实地拍摄加传统 CG 流水线的方案,已经完全行不通。'
|
||||
},
|
||||
// Topic 2: The Output
|
||||
'customers.detail.groove-jones.topic-2.label': {
|
||||
en: 'THE OUTPUT',
|
||||
'zh-CN': '交付成果'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-2.title': {
|
||||
en: 'The Output Groove Jones Achieved Using Comfy',
|
||||
'zh-CN': 'Groove Jones 借助 Comfy 实现的交付成果'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-2.block.0': {
|
||||
en: 'A full FOOH (faux out-of-home) social campaign delivered on a tight holiday deadline\nHyper-realistic videos of giant NFL-licensed Crocs parachuting onto Dick\u2019s Sporting Goods parking lots\nVertical 9:16 deliverables at 2K for Instagram Reels, TikTok, and YouTube Shorts\nSame-day iteration on client notes instead of week-long asset updates\nWinner, Aaron Awards 2024: Best AI Workflow for Production',
|
||||
'zh-CN':
|
||||
'在紧迫的节日档期内交付完整的 FOOH(虚构户外广告)社媒营销活动\n超写实视频:巨型 NFL 授权 Crocs 鞋款跳伞落入 Dick\u2019s Sporting Goods 停车场\n面向 Instagram Reels、TikTok、YouTube Shorts 的 9:16 竖屏 2K 交付物\n客户反馈当天迭代,不再需要数周的资产更新周期\n荣获 2024 年 Aaron Awards:最佳 AI 制作工作流奖'
|
||||
},
|
||||
// Topic 3: The Problem
|
||||
'customers.detail.groove-jones.topic-3.label': {
|
||||
en: 'THE PROBLEM',
|
||||
'zh-CN': '挑战'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-3.title': {
|
||||
en: 'The Problem Groove Jones Was Trying to Solve',
|
||||
'zh-CN': 'Groove Jones 试图解决的问题'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-3.block.0': {
|
||||
en: 'A traditional pipeline for this creative meant a live-action shoot at multiple store locations plus a full CG build: high-res modeling of every team\u2019s clog, look development, lighting, rendering, compositing, and a new render every time the client wanted a variation. It also meant a large crew (modelers, texture artists, lighting artists, compositors) and a schedule measured in months. Neither the budget nor the holiday window supported that path.',
|
||||
'zh-CN':
|
||||
'按照传统流水线做这个创意,意味着要在多家门店实地拍摄,加上完整的 CG 制作:每支球队鞋款的高精建模、look development、灯光、渲染、合成,客户每次想要新变体都要重新渲染。这也意味着庞大的团队(建模师、纹理师、灯光师、合成师),以及以"月"为单位的工期。无论是预算还是节日档期,都无法支撑这条路径。'
|
||||
},
|
||||
// Topic 4: How Comfy Solved the Problem
|
||||
'customers.detail.groove-jones.topic-4.label': {
|
||||
en: 'HOW COMFY SOLVED THE PROBLEM',
|
||||
'zh-CN': 'Comfy 如何解决问题'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-4.title': {
|
||||
en: 'How Groove Jones Used Comfy to Solve the Problem',
|
||||
'zh-CN': 'Groove Jones 如何用 Comfy 解决问题'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-4.block.0': {
|
||||
en: 'Groove Jones\u2019s Senior Creative Technologist, Doug Hogan, rebuilt the production process around Comfy\u2019s node-based workflow system, using their proprietary GrooveTech GenVFX pipeline. Custom LoRAs handled brand accuracy, a single Comfy graph orchestrated multiple generative models, and Nuke handled final polish. For a team with feature-film and commercial roots, the environment was immediately familiar.',
|
||||
'zh-CN':
|
||||
'Groove Jones 的高级创意技术总监 Doug Hogan 围绕 Comfy 的节点式工作流系统重新搭建了制作流程,并基于他们自研的 GrooveTech GenVFX 流水线展开。自定义 LoRA 负责保证品牌一致性,一张 Comfy 图编排多个生成模型,Nuke 负责最终精修。对于有电影和广告制作背景的团队,这套环境上手没有任何门槛。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-4.block.1.text': {
|
||||
en: 'Comfy felt very similar to working inside a traditional CG and compositing pipeline. Node-based logic, clear data flow, modular builds. It felt natural to our artists already.',
|
||||
'zh-CN':
|
||||
'Comfy 用起来非常像传统 CG 和合成流水线:节点逻辑、清晰的数据流、模块化构建。我们的艺术家用起来毫无违和感。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-4.block.1.name': {
|
||||
en: 'Doug Hogan | Senior Creative Technologist @ Groove Jones',
|
||||
'zh-CN': 'Doug Hogan | Groove Jones 高级创意技术总监'
|
||||
},
|
||||
// Topic 5: Brand-Trained LoRAs
|
||||
'customers.detail.groove-jones.topic-5.label': {
|
||||
en: 'BRAND-TRAINED LORAS',
|
||||
'zh-CN': '品牌定制 LORA'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-5.title': {
|
||||
en: 'Brand-Trained LoRAs for Hero Assets',
|
||||
'zh-CN': '为主视觉资产定制的品牌 LoRA'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-5.block.0': {
|
||||
en: 'Groove Jones trained custom LoRAs on the Crocs NFL Team Clogs and on Dick\u2019s Sporting Goods storefronts, so every generation came out anchored in brand-accurate references. Real team colorways, real product silhouettes, and real store exteriors stayed consistent across shots without per-frame correction, replacing what would normally take weeks of manual look development.',
|
||||
'zh-CN':
|
||||
'Groove Jones 基于 Crocs NFL 球队联名鞋款和 Dick\u2019s Sporting Goods 门店外景训练了定制 LoRA,让每一次生成都能锚定品牌精准的参考素材。真实的球队配色、产品轮廓和门店外观在不同镜头之间保持一致,不需要逐帧修正——而这通常意味着数周的 look development 工作量。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-5.block.1.src': {
|
||||
en: 'https://media.comfy.org/website/customers/groove-jones/nfl-crocs-team-lineup.webp',
|
||||
'zh-CN':
|
||||
'https://media.comfy.org/website/customers/groove-jones/nfl-crocs-team-lineup.webp'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-5.block.1.alt': {
|
||||
en: 'Grid of brand-accurate NFL team Crocs generated via custom LoRAs',
|
||||
'zh-CN': '通过定制 LoRA 生成的多支 NFL 球队联名 Crocs 网格'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-5.block.1.caption': {
|
||||
en: 'Brand-accurate NFL team colorways generated through custom LoRAs.',
|
||||
'zh-CN': '通过定制 LoRA 生成的、与品牌精准一致的 NFL 球队配色。'
|
||||
},
|
||||
// Topic 6: Multi-Model Orchestration
|
||||
'customers.detail.groove-jones.topic-6.label': {
|
||||
en: 'MULTI-MODEL ORCHESTRATION',
|
||||
'zh-CN': '多模型编排'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-6.title': {
|
||||
en: 'Multi-Model Orchestration in a Single Graph',
|
||||
'zh-CN': '单张图内的多模型编排'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-6.block.0': {
|
||||
en: 'The creative required different generative models at different stages: Flux for key-frame still development, Gemini Flash 2.5 (Nano Banana) for fast ideation and variants, and Veo 3.1 plus Moonvalley\u2019s Marey for final video generation. Comfy routed between all four inside one graph, so outputs from one model fed directly into the next without ever leaving the environment.',
|
||||
'zh-CN':
|
||||
'这个创意在不同阶段需要不同的生成模型:Flux 用于关键帧静帧开发,Gemini Flash 2.5(Nano Banana)用于快速构思和变体生成,Veo 3.1 加上 Moonvalley 的 Marey 用于最终的视频生成。Comfy 在一张图里就把这四个模型串起来,前一个模型的输出直接喂给下一个模型,全程无需切换环境。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-6.block.1.text': {
|
||||
en: 'The Comfy community develops at an almost exponential curve, and we were able to leverage their existing nodes and tools to solve very specific production challenges instead of reinventing the wheel ourselves.',
|
||||
'zh-CN':
|
||||
'Comfy 社区几乎是指数级增长的,我们可以直接利用社区已有的节点和工具去解决非常具体的制作问题,而不必自己重新造轮子。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-6.block.1.name': {
|
||||
en: 'Dale Carman | Co-founder @ Groove Jones',
|
||||
'zh-CN': 'Dale Carman | Groove Jones 联合创始人'
|
||||
},
|
||||
// Topic 7: The Pipeline
|
||||
'customers.detail.groove-jones.topic-7.label': {
|
||||
en: 'THE PIPELINE',
|
||||
'zh-CN': '流水线'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-7.title': {
|
||||
en: 'Storyboards to Previz to Final Shot in One Pipeline',
|
||||
'zh-CN': '从故事板到 Previz 再到成片,全部在一条流水线内'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-7.block.0': {
|
||||
en: 'The workflow opened with traditional storyboards for narrative approval, then moved into CGI blocking to lock composition, camera framing, and story beats. Comfy drove generation from there: the shoe drop, the parking lot reactions, the crowd coverage, and the environmental conversions that turned static summer storefronts into snow-covered holiday scenes, all inside the same graph.',
|
||||
'zh-CN':
|
||||
'工作流从传统故事板开始用于叙事确认,再进入 CGI blocking,锁定构图、镜头取景和叙事节奏。从这里开始 Comfy 接管生成:鞋款空投、停车场反应镜头、人群覆盖、把夏季静态门店外景转换成被雪覆盖的节日场景——全部在同一张图里完成。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-7.block.1.src': {
|
||||
en: 'https://media.comfy.org/website/customers/groove-jones/nfl-crocs-dicks-storyboards.webp',
|
||||
'zh-CN':
|
||||
'https://media.comfy.org/website/customers/groove-jones/nfl-crocs-dicks-storyboards.webp'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-7.block.1.alt': {
|
||||
en: 'Storyboard grid for the Crocs x NFL holiday campaign',
|
||||
'zh-CN': 'Crocs x NFL 节日营销的故事板网格'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-7.block.1.caption': {
|
||||
en: 'Grayscale storyboards used to lock narrative beats before generation.',
|
||||
'zh-CN': '在生成之前用于锁定叙事节奏的灰度故事板。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-7.block.2.src': {
|
||||
en: 'https://media.comfy.org/website/customers/groove-jones/nfl-crocs-fooh-sequence.webp',
|
||||
'zh-CN':
|
||||
'https://media.comfy.org/website/customers/groove-jones/nfl-crocs-fooh-sequence.webp'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-7.block.2.alt': {
|
||||
en: 'Composition progression from blocking to mid-render to final shot',
|
||||
'zh-CN': '从 blocking 到中间渲染再到最终镜头的构图演进'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-7.block.2.caption': {
|
||||
en: 'Composition progression: wireframe blocking, mid-render, and final shot.',
|
||||
'zh-CN': '构图演进:线框 blocking、中间渲染、最终成片。'
|
||||
},
|
||||
// Topic 8: Version Control
|
||||
'customers.detail.groove-jones.topic-8.label': {
|
||||
en: 'VERSION CONTROL',
|
||||
'zh-CN': '版本管理'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-8.title': {
|
||||
en: 'Workflow Files as Version Control',
|
||||
'zh-CN': '把工作流文件当作版本管理'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-8.block.0': {
|
||||
en: 'Every variant of every shot lived as a Comfy workflow file, which doubled as version control. When notes came in requesting a different team colorway, store exterior, or time of day, the team duplicated a branch instead of rebuilding, which made same-day iteration possible. GPU usage and API credit burn were trackable inside the same environment as the work itself, giving Production real-time visibility into compute cost per iteration.',
|
||||
'zh-CN':
|
||||
'每个镜头的每个变体都以 Comfy 工作流文件的形式存在,文件本身就是版本管理。当客户反馈要求换一支球队配色、换一个门店外景或者换一个时间段时,团队只需复制一个分支,而不是重建——这才让"当天迭代"成为可能。GPU 使用量和 API 额度消耗也都能在同一个环境里追踪到,让制作部门实时看到每次迭代的算力成本。'
|
||||
},
|
||||
// Topic 9: Finishing in Nuke
|
||||
'customers.detail.groove-jones.topic-9.label': {
|
||||
en: 'FINISHING IN NUKE',
|
||||
'zh-CN': 'Nuke 终修'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-9.title': {
|
||||
en: 'Finishing in Nuke',
|
||||
'zh-CN': '在 Nuke 中完成终修'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-9.block.0': {
|
||||
en: 'Generated shots moved into Nuke for final compositing: falling snow, camera shake, crowd ambience, holiday audio, and 2K mastering in 9:16 for Instagram Reels, TikTok, and YouTube Shorts. Because Comfy handled generation cleanly, Nuke focused on polish and motion enhancement rather than patching generative artifacts.',
|
||||
'zh-CN':
|
||||
'生成的镜头进入 Nuke 完成最终合成:飘雪、镜头抖动、人群环境音、节日氛围音效,以及面向 Instagram Reels、TikTok、YouTube Shorts 的 9:16 2K 母带。由于 Comfy 把生成环节处理得很干净,Nuke 可以专注于精修和动态增强,而不是去修补生成模型留下的瑕疵。'
|
||||
},
|
||||
// Topic 10: The Takeaway
|
||||
'customers.detail.groove-jones.topic-10.label': {
|
||||
en: 'THE TAKEAWAY',
|
||||
'zh-CN': '总结'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-10.title': {
|
||||
en: 'Conclusion',
|
||||
'zh-CN': '结语'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-10.block.0': {
|
||||
en: 'By building the FOOH pipeline inside Comfy, Groove Jones turned a brief that would have required an expensive live-action shoot plus months of CG into a fast, iterative, single-environment workflow the client could direct in real time. The project recently won the Aaron Award for Best AI Workflow for Production.',
|
||||
'zh-CN':
|
||||
'通过在 Comfy 中搭建整套 FOOH 流水线,Groove Jones 把一个原本需要昂贵实地拍摄加数月 CG 制作的项目,变成了一套高速迭代、单一环境、客户可以实时指挥的工作流。该项目近期还荣获 Aaron Award 的"最佳 AI 制作工作流"奖。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-10.block.1.text': {
|
||||
en: 'At Groove Jones, we care deeply about delivering work that makes people say WOW! But we also care about delivering on time and on budget. VFX projects used to operate at razor thin margins. Comfy solved that for us.',
|
||||
'zh-CN':
|
||||
'在 Groove Jones,我们非常在意交付让人说"WOW!"的作品,但我们同样在意按时按预算交付。VFX 项目以前的利润率薄得像刀刃,Comfy 帮我们彻底解决了这个问题。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-10.block.1.name': {
|
||||
en: 'Dale Carman | Co-founder @ Groove Jones',
|
||||
'zh-CN': 'Dale Carman | Groove Jones 联合创始人'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-10.block.2.label': {
|
||||
en: 'GROOVE JONES CONTRIBUTORS',
|
||||
'zh-CN': 'GROOVE JONES 贡献者'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-10.block.2.name': {
|
||||
en: 'TBD',
|
||||
'zh-CN': '待补充'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-10.block.2.role': {
|
||||
en: 'TBD',
|
||||
'zh-CN': '待补充'
|
||||
},
|
||||
|
||||
// Contact – FormSection
|
||||
'contact.form.badge': {
|
||||
en: 'CONTACT SALES',
|
||||
|
||||
3
apps/website/src/utils/marketingImage.ts
Normal file
3
apps/website/src/utils/marketingImage.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export const MARKETING_FORMATS = ['avif', 'webp'] as const
|
||||
|
||||
export const MARKETING_WIDTHS = [640, 960, 1280, 1920] as const
|
||||
111
apps/website/src/utils/video.test.ts
Normal file
111
apps/website/src/utils/video.test.ts
Normal file
@@ -0,0 +1,111 @@
|
||||
import { describe, expect, it } from 'vitest'
|
||||
|
||||
import { buildVideoSources, videoKey } from './video'
|
||||
|
||||
describe('buildVideoSources', () => {
|
||||
it('builds a source per requested format', () => {
|
||||
const sources = buildVideoSources({
|
||||
name: 'hero',
|
||||
baseUrl: 'https://media.comfy.org/website/marketing',
|
||||
width: 1280,
|
||||
formats: ['webm', 'mp4']
|
||||
})
|
||||
|
||||
expect(sources).toEqual([
|
||||
{
|
||||
src: 'https://media.comfy.org/website/marketing/hero-1280.webm',
|
||||
type: 'video/webm',
|
||||
format: 'webm'
|
||||
},
|
||||
{
|
||||
src: 'https://media.comfy.org/website/marketing/hero-1280.mp4',
|
||||
type: 'video/mp4',
|
||||
format: 'mp4'
|
||||
}
|
||||
])
|
||||
})
|
||||
|
||||
it('preserves caller-supplied format order', () => {
|
||||
const sources = buildVideoSources({
|
||||
name: 'clip',
|
||||
baseUrl: 'https://cdn.example.com/v',
|
||||
width: 960,
|
||||
formats: ['mp4', 'webm']
|
||||
})
|
||||
|
||||
expect(sources.map((s) => s.format)).toEqual(['mp4', 'webm'])
|
||||
})
|
||||
|
||||
it('strips a single trailing slash from baseUrl', () => {
|
||||
const sources = buildVideoSources({
|
||||
name: 'reel',
|
||||
baseUrl: 'https://media.comfy.org/website/marketing/',
|
||||
width: 1920,
|
||||
formats: ['webm']
|
||||
})
|
||||
|
||||
expect(sources[0]?.src).toBe(
|
||||
'https://media.comfy.org/website/marketing/reel-1920.webm'
|
||||
)
|
||||
})
|
||||
|
||||
it('returns an empty list when no formats are requested', () => {
|
||||
const sources = buildVideoSources({
|
||||
name: 'x',
|
||||
baseUrl: 'https://example.com',
|
||||
width: 640,
|
||||
formats: []
|
||||
})
|
||||
|
||||
expect(sources).toEqual([])
|
||||
})
|
||||
})
|
||||
|
||||
describe('videoKey', () => {
|
||||
it('changes when the source URL list changes', () => {
|
||||
const at1280 = buildVideoSources({
|
||||
name: 'hero',
|
||||
baseUrl: 'https://media.comfy.org/m',
|
||||
width: 1280,
|
||||
formats: ['webm', 'mp4']
|
||||
})
|
||||
const at640 = buildVideoSources({
|
||||
name: 'hero',
|
||||
baseUrl: 'https://media.comfy.org/m',
|
||||
width: 640,
|
||||
formats: ['webm', 'mp4']
|
||||
})
|
||||
|
||||
expect(videoKey(at1280)).not.toBe(videoKey(at640))
|
||||
})
|
||||
|
||||
it('is stable across repeated calls with the same inputs', () => {
|
||||
const args = {
|
||||
name: 'hero',
|
||||
baseUrl: 'https://media.comfy.org/m',
|
||||
width: 1280,
|
||||
formats: ['webm', 'mp4'] as const
|
||||
}
|
||||
|
||||
expect(
|
||||
videoKey(buildVideoSources({ ...args, formats: [...args.formats] }))
|
||||
).toBe(videoKey(buildVideoSources({ ...args, formats: [...args.formats] })))
|
||||
})
|
||||
|
||||
it('reflects format-order changes', () => {
|
||||
const webmFirst = buildVideoSources({
|
||||
name: 'hero',
|
||||
baseUrl: 'https://media.comfy.org/m',
|
||||
width: 1280,
|
||||
formats: ['webm', 'mp4']
|
||||
})
|
||||
const mp4First = buildVideoSources({
|
||||
name: 'hero',
|
||||
baseUrl: 'https://media.comfy.org/m',
|
||||
width: 1280,
|
||||
formats: ['mp4', 'webm']
|
||||
})
|
||||
|
||||
expect(videoKey(webmFirst)).not.toBe(videoKey(mp4First))
|
||||
})
|
||||
})
|
||||
49
apps/website/src/utils/video.ts
Normal file
49
apps/website/src/utils/video.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
/** @knipIgnoreUsedByStackedPR */
|
||||
export type VideoFormat = 'webm' | 'mp4'
|
||||
|
||||
/** @knipIgnoreUsedByStackedPR */
|
||||
export type VideoSource = {
|
||||
src: string
|
||||
type: `video/${VideoFormat}`
|
||||
format: VideoFormat
|
||||
}
|
||||
|
||||
const MIME_TYPES: Record<VideoFormat, VideoSource['type']> = {
|
||||
webm: 'video/webm',
|
||||
mp4: 'video/mp4'
|
||||
}
|
||||
|
||||
type BuildArgs = {
|
||||
name: string
|
||||
baseUrl: string
|
||||
width: number
|
||||
formats: VideoFormat[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Expects assets named `${name}-${width}.${format}` under `${baseUrl}/`,
|
||||
* matching the output of `apps/website/scripts/process-videos.sh`.
|
||||
*/
|
||||
export function buildVideoSources({
|
||||
name,
|
||||
baseUrl,
|
||||
width,
|
||||
formats
|
||||
}: BuildArgs): VideoSource[] {
|
||||
const base = baseUrl.endsWith('/') ? baseUrl.slice(0, -1) : baseUrl
|
||||
|
||||
return formats.map((format) => ({
|
||||
src: `${base}/${name}-${width}.${format}`,
|
||||
type: MIME_TYPES[format],
|
||||
format
|
||||
}))
|
||||
}
|
||||
|
||||
/**
|
||||
* Stable identifier for a list of video sources, suitable as a Vue `key`.
|
||||
* Browsers do not reload a `<video>` when nested `<source>` children change;
|
||||
* keying the parent forces a remount when the source set changes.
|
||||
*/
|
||||
export function videoKey(sources: VideoSource[]): string {
|
||||
return sources.map((s) => s.src).join('|')
|
||||
}
|
||||
@@ -7,6 +7,15 @@
|
||||
"github": {
|
||||
"enabled": false
|
||||
},
|
||||
"headers": [
|
||||
{
|
||||
"source": "/(.*)",
|
||||
"has": [
|
||||
{ "type": "host", "value": "website-frontend-comfyui.vercel.app" }
|
||||
],
|
||||
"headers": [{ "key": "X-Robots-Tag", "value": "index, follow" }]
|
||||
}
|
||||
],
|
||||
"redirects": [
|
||||
{
|
||||
"source": "/pricing",
|
||||
|
||||
@@ -119,7 +119,15 @@
|
||||
{ "name": "CLIP", "type": "CLIP", "links": [3, 5], "slot_index": 1 },
|
||||
{ "name": "VAE", "type": "VAE", "links": [8], "slot_index": 2 }
|
||||
],
|
||||
"properties": {},
|
||||
"properties": {
|
||||
"models": [
|
||||
{
|
||||
"name": "v1-5-pruned-emaonly-fp16.safetensors",
|
||||
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors",
|
||||
"directory": "checkpoints"
|
||||
}
|
||||
]
|
||||
},
|
||||
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
|
||||
}
|
||||
],
|
||||
|
||||
@@ -211,7 +211,8 @@ export const TestIds = {
|
||||
queue: {
|
||||
overlayToggle: 'queue-overlay-toggle',
|
||||
clearHistoryAction: 'clear-history-action',
|
||||
jobAssetsList: 'job-assets-list'
|
||||
jobAssetsList: 'job-assets-list',
|
||||
notificationBanner: 'queue-notification-banner'
|
||||
},
|
||||
errors: {
|
||||
imageLoadError: 'error-loading-image',
|
||||
|
||||
164
browser_tests/tests/queueNotificationBanners.spec.ts
Normal file
164
browser_tests/tests/queueNotificationBanners.spec.ts
Normal file
@@ -0,0 +1,164 @@
|
||||
import type { Page } from '@playwright/test'
|
||||
import { expect } from '@playwright/test'
|
||||
|
||||
import { comfyPageFixture as test } from '@e2e/fixtures/ComfyPage'
|
||||
import { TestIds } from '@e2e/fixtures/selectors'
|
||||
|
||||
// Mirrors BANNER_DISMISS_DELAY_MS in src/composables/queue/useQueueNotificationBanners.ts.
|
||||
// Duplicated here to avoid pulling production source (and its litegraph
|
||||
// transitive deps) into the Playwright TS loader.
|
||||
const BANNER_DISMISS_DELAY_MS = 4000
|
||||
const BANNER_ASSERT_TIMEOUT_MS = BANNER_DISMISS_DELAY_MS + 2000
|
||||
|
||||
const REQUEST_ID_PRIMARY = 1
|
||||
const REQUEST_ID_SECONDARY = 2
|
||||
const REQUEST_ID_MISMATCH = 999
|
||||
|
||||
let nextRequestId = 1000
|
||||
const newRequestId = () => nextRequestId++
|
||||
|
||||
function bannerLocator(page: Page) {
|
||||
return page.getByTestId(TestIds.queue.notificationBanner)
|
||||
}
|
||||
|
||||
type DispatchOpts = { batchCount?: number; requestId?: number }
|
||||
|
||||
function dispatchPromptQueueing(page: Page, opts: DispatchOpts = {}) {
|
||||
return page.evaluate(
|
||||
([batchCount, requestId]) => {
|
||||
window.app!.api.dispatchCustomEvent('promptQueueing', {
|
||||
batchCount,
|
||||
requestId
|
||||
})
|
||||
},
|
||||
[opts.batchCount ?? 1, opts.requestId ?? newRequestId()]
|
||||
)
|
||||
}
|
||||
|
||||
function dispatchPromptQueued(page: Page, opts: DispatchOpts = {}) {
|
||||
return page.evaluate(
|
||||
([batchCount, requestId]) => {
|
||||
window.app!.api.dispatchCustomEvent('promptQueued', {
|
||||
number: 0,
|
||||
batchCount,
|
||||
requestId
|
||||
})
|
||||
},
|
||||
[opts.batchCount ?? 1, opts.requestId ?? newRequestId()]
|
||||
)
|
||||
}
|
||||
|
||||
test.describe('Queue notification banners', { tag: ['@ui'] }, () => {
|
||||
test.describe('Queuing lifecycle', () => {
|
||||
test('promptQueueing event shows a queueing banner', async ({
|
||||
comfyPage
|
||||
}) => {
|
||||
await dispatchPromptQueueing(comfyPage.page)
|
||||
|
||||
const banner = bannerLocator(comfyPage.page)
|
||||
await expect(banner).toBeVisible()
|
||||
await expect(banner).toContainText('queuing')
|
||||
})
|
||||
|
||||
test('promptQueued upgrades a pending banner to queued', async ({
|
||||
comfyPage
|
||||
}) => {
|
||||
await dispatchPromptQueueing(comfyPage.page, {
|
||||
batchCount: 1,
|
||||
requestId: REQUEST_ID_PRIMARY
|
||||
})
|
||||
|
||||
const banner = bannerLocator(comfyPage.page)
|
||||
await expect(banner).toContainText('queuing')
|
||||
|
||||
await dispatchPromptQueued(comfyPage.page, {
|
||||
batchCount: 1,
|
||||
requestId: REQUEST_ID_PRIMARY
|
||||
})
|
||||
|
||||
await expect(banner).toContainText('queued')
|
||||
})
|
||||
|
||||
test('promptQueued with batch count > 1 shows plural text', async ({
|
||||
comfyPage
|
||||
}) => {
|
||||
await dispatchPromptQueued(comfyPage.page, { batchCount: 3 })
|
||||
|
||||
const banner = bannerLocator(comfyPage.page)
|
||||
await expect(banner).toBeVisible()
|
||||
await expect(banner).toContainText('3')
|
||||
await expect(banner).toContainText('jobs added to queue')
|
||||
})
|
||||
|
||||
test('promptQueued with mismatched requestId enqueues a separate queued banner', async ({
|
||||
comfyPage
|
||||
}) => {
|
||||
await dispatchPromptQueueing(comfyPage.page, {
|
||||
batchCount: 1,
|
||||
requestId: REQUEST_ID_PRIMARY
|
||||
})
|
||||
|
||||
const banner = bannerLocator(comfyPage.page)
|
||||
await expect(banner).toContainText('queuing')
|
||||
|
||||
await dispatchPromptQueued(comfyPage.page, {
|
||||
batchCount: 1,
|
||||
requestId: REQUEST_ID_MISMATCH
|
||||
})
|
||||
|
||||
// Pending banner is not upgraded — still shows "queuing".
|
||||
await expect(banner).toContainText('queuing')
|
||||
|
||||
// After the pending banner auto-dismisses, the queued banner appears.
|
||||
await expect(banner).toContainText('queued', {
|
||||
timeout: BANNER_ASSERT_TIMEOUT_MS
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test.describe('Auto-dismiss', () => {
|
||||
test('Banner auto-dismisses after timeout', async ({ comfyPage }) => {
|
||||
await dispatchPromptQueued(comfyPage.page)
|
||||
|
||||
const banner = bannerLocator(comfyPage.page)
|
||||
await expect(banner).toBeVisible()
|
||||
await expect(banner).toBeHidden({ timeout: BANNER_ASSERT_TIMEOUT_MS })
|
||||
})
|
||||
})
|
||||
|
||||
test.describe('Notification queue (FIFO)', () => {
|
||||
test('Second notification shows after first auto-dismisses', async ({
|
||||
comfyPage
|
||||
}) => {
|
||||
await dispatchPromptQueued(comfyPage.page, {
|
||||
batchCount: 1,
|
||||
requestId: REQUEST_ID_PRIMARY
|
||||
})
|
||||
await dispatchPromptQueued(comfyPage.page, {
|
||||
batchCount: 2,
|
||||
requestId: REQUEST_ID_SECONDARY
|
||||
})
|
||||
|
||||
const banner = bannerLocator(comfyPage.page)
|
||||
await expect(banner).toContainText('Job queued')
|
||||
await expect(banner).toContainText('2 jobs added to queue', {
|
||||
timeout: BANNER_ASSERT_TIMEOUT_MS
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test.describe('Direct queued event (no pending predecessor)', () => {
|
||||
test('promptQueued without prior queueing shows queued banner directly', async ({
|
||||
comfyPage
|
||||
}) => {
|
||||
await dispatchPromptQueued(comfyPage.page, {
|
||||
batchCount: 1,
|
||||
requestId: REQUEST_ID_PRIMARY
|
||||
})
|
||||
|
||||
const banner = bannerLocator(comfyPage.page)
|
||||
await expect(banner).toBeVisible()
|
||||
await expect(banner).toContainText('queued')
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -54,6 +54,9 @@ const config: KnipConfig = {
|
||||
'.github/workflows/ci-oss-assets-validation.yaml',
|
||||
// Pending integration in stacked PR
|
||||
'src/components/sidebar/tabs/nodeLibrary/CustomNodesPanel.vue',
|
||||
// Marketing media tooling — adopted by pages in a follow-up PR
|
||||
'apps/website/src/components/common/SiteVideo.vue',
|
||||
'apps/website/src/utils/marketingImage.ts',
|
||||
// Agent review check config, not part of the build
|
||||
'.agents/checks/eslint.strict.config.js',
|
||||
// Devtools extensions, included dynamically
|
||||
|
||||
177
scripts/generate-embedded-metadata-test-files.py
Normal file
177
scripts/generate-embedded-metadata-test-files.py
Normal file
@@ -0,0 +1,177 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Generate test fixture files for metadata parser tests.
|
||||
|
||||
Each fixture embeds the same workflow and prompt JSON, matching the
|
||||
format the ComfyUI backend uses to write metadata.
|
||||
|
||||
Prerequisites:
|
||||
source ~/ComfyUI/.venv/bin/activate
|
||||
python3 scripts/generate-embedded-metadata-test-files.py
|
||||
|
||||
Output: src/scripts/metadata/__fixtures__/
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import struct
|
||||
import subprocess
|
||||
|
||||
import av
|
||||
from PIL import Image
|
||||
|
||||
REPO_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
FIXTURES_DIR = os.path.join(REPO_ROOT, 'src', 'scripts', 'metadata', '__fixtures__')
|
||||
|
||||
WORKFLOW = {
|
||||
'nodes': [
|
||||
{
|
||||
'id': 1,
|
||||
'type': 'KSampler',
|
||||
'pos': [100, 100],
|
||||
'size': [200, 200],
|
||||
}
|
||||
]
|
||||
}
|
||||
PROMPT = {'1': {'class_type': 'KSampler', 'inputs': {}}}
|
||||
|
||||
WORKFLOW_JSON = json.dumps(WORKFLOW, separators=(',', ':'))
|
||||
PROMPT_JSON = json.dumps(PROMPT, separators=(',', ':'))
|
||||
|
||||
|
||||
def out(name: str) -> str:
|
||||
return os.path.join(FIXTURES_DIR, name)
|
||||
|
||||
|
||||
def report(name: str):
|
||||
size = os.path.getsize(out(name))
|
||||
print(f' {name} ({size} bytes)')
|
||||
|
||||
|
||||
def make_1x1_image() -> Image.Image:
|
||||
return Image.new('RGB', (1, 1), (255, 0, 0))
|
||||
|
||||
|
||||
def build_exif_bytes() -> bytes:
|
||||
"""Build EXIF bytes matching the backend's tag assignments.
|
||||
|
||||
Backend: 0x010F (Make) = "workflow:<json>", 0x0110 (Model) = "prompt:<json>"
|
||||
"""
|
||||
img = make_1x1_image()
|
||||
exif = img.getexif()
|
||||
exif[0x010F] = f'workflow:{WORKFLOW_JSON}'
|
||||
exif[0x0110] = f'prompt:{PROMPT_JSON}'
|
||||
return exif.tobytes()
|
||||
|
||||
|
||||
def inject_exif_prefix_in_webp(path: str):
|
||||
"""Prepend Exif\\0\\0 to the EXIF chunk in a WEBP file.
|
||||
|
||||
PIL always strips this prefix, so we re-inject it to test that code path.
|
||||
"""
|
||||
data = bytearray(open(path, 'rb').read())
|
||||
off = 12
|
||||
while off < len(data):
|
||||
chunk_type = data[off:off + 4]
|
||||
chunk_len = struct.unpack_from('<I', data, off + 4)[0]
|
||||
if chunk_type == b'EXIF':
|
||||
prefix = b'Exif\x00\x00'
|
||||
data[off + 8:off + 8] = prefix
|
||||
struct.pack_into('<I', data, off + 4, chunk_len + len(prefix))
|
||||
riff_size = struct.unpack_from('<I', data, 4)[0]
|
||||
struct.pack_into('<I', data, 4, riff_size + len(prefix))
|
||||
break
|
||||
off += 8 + chunk_len + (chunk_len % 2)
|
||||
with open(path, 'wb') as f:
|
||||
f.write(data)
|
||||
|
||||
|
||||
def generate_av_fixture(
|
||||
name: str,
|
||||
fmt: str,
|
||||
codec: str,
|
||||
rate: int = 44100,
|
||||
options: dict | None = None,
|
||||
):
|
||||
"""Generate an audio fixture via PyAV container.metadata[], matching the backend."""
|
||||
path = out(name)
|
||||
container = av.open(path, mode='w', format=fmt, options=options or {})
|
||||
stream = container.add_stream(codec, rate=rate)
|
||||
stream.layout = 'mono'
|
||||
|
||||
container.metadata['prompt'] = PROMPT_JSON
|
||||
container.metadata['workflow'] = WORKFLOW_JSON
|
||||
|
||||
sample_fmt = stream.codec_context.codec.audio_formats[0].name
|
||||
samples = stream.codec_context.frame_size or 1024
|
||||
frame = av.AudioFrame(format=sample_fmt, layout='mono', samples=samples)
|
||||
frame.rate = rate
|
||||
frame.pts = 0
|
||||
for packet in stream.encode(frame):
|
||||
container.mux(packet)
|
||||
for packet in stream.encode():
|
||||
container.mux(packet)
|
||||
container.close()
|
||||
report(name)
|
||||
|
||||
|
||||
def generate_webp():
|
||||
img = make_1x1_image()
|
||||
exif = build_exif_bytes()
|
||||
|
||||
img.save(out('with_metadata.webp'), 'WEBP', exif=exif)
|
||||
report('with_metadata.webp')
|
||||
|
||||
img.save(out('with_metadata_exif_prefix.webp'), 'WEBP', exif=exif)
|
||||
inject_exif_prefix_in_webp(out('with_metadata_exif_prefix.webp'))
|
||||
report('with_metadata_exif_prefix.webp')
|
||||
|
||||
|
||||
def generate_avif():
|
||||
img = make_1x1_image()
|
||||
exif = build_exif_bytes()
|
||||
img.save(out('with_metadata.avif'), 'AVIF', exif=exif)
|
||||
report('with_metadata.avif')
|
||||
|
||||
|
||||
def generate_flac():
|
||||
generate_av_fixture('with_metadata.flac', 'flac', 'flac')
|
||||
|
||||
|
||||
def generate_opus():
|
||||
generate_av_fixture('with_metadata.opus', 'opus', 'libopus', rate=48000)
|
||||
|
||||
|
||||
def generate_mp3():
|
||||
generate_av_fixture('with_metadata.mp3', 'mp3', 'libmp3lame')
|
||||
|
||||
|
||||
def generate_mp4():
|
||||
"""Generate MP4 via ffmpeg CLI with QuickTime keys/ilst metadata."""
|
||||
path = out('with_metadata.mp4')
|
||||
subprocess.run([
|
||||
'ffmpeg', '-y', '-loglevel', 'error',
|
||||
'-f', 'lavfi', '-i', 'anullsrc=r=44100:cl=mono',
|
||||
'-t', '0.01', '-c:a', 'aac', '-b:a', '32k',
|
||||
'-movflags', 'use_metadata_tags',
|
||||
'-metadata', f'prompt={PROMPT_JSON}',
|
||||
'-metadata', f'workflow={WORKFLOW_JSON}',
|
||||
path,
|
||||
], check=True)
|
||||
report('with_metadata.mp4')
|
||||
|
||||
|
||||
def generate_webm():
|
||||
generate_av_fixture('with_metadata.webm', 'webm', 'libvorbis')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
print('Generating fixtures...')
|
||||
generate_webp()
|
||||
generate_avif()
|
||||
generate_flac()
|
||||
generate_opus()
|
||||
generate_mp3()
|
||||
generate_mp4()
|
||||
generate_webm()
|
||||
print('Done.')
|
||||
@@ -1,80 +0,0 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import { assert, setAssertReporter } from '@/base/assert'
|
||||
|
||||
describe('assert', () => {
|
||||
let consoleErrorSpy: ReturnType<typeof vi.spyOn>
|
||||
|
||||
beforeEach(() => {
|
||||
consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks()
|
||||
vi.unstubAllEnvs()
|
||||
setAssertReporter(() => {})
|
||||
})
|
||||
|
||||
it('does nothing when condition is true', () => {
|
||||
expect(() => assert(true, 'should not throw')).not.toThrow()
|
||||
expect(consoleErrorSpy).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('logs console.error when condition is false', () => {
|
||||
vi.stubEnv('DEV', false)
|
||||
assert(false, 'test message')
|
||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||
'[Assertion failed]: test message'
|
||||
)
|
||||
})
|
||||
|
||||
it('throws in DEV mode when condition is false', () => {
|
||||
vi.stubEnv('DEV', true)
|
||||
const reporter = vi.fn()
|
||||
setAssertReporter(reporter)
|
||||
expect(() => assert(false, 'dev error')).toThrow(
|
||||
'[Assertion failed]: dev error'
|
||||
)
|
||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||
'[Assertion failed]: dev error'
|
||||
)
|
||||
expect(reporter).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('does not throw in non-DEV mode when condition is false', () => {
|
||||
vi.stubEnv('DEV', false)
|
||||
expect(() => assert(false, 'non-dev error')).not.toThrow()
|
||||
})
|
||||
|
||||
it('calls registered reporter in non-DEV mode', () => {
|
||||
vi.stubEnv('DEV', false)
|
||||
const reporter = vi.fn()
|
||||
setAssertReporter(reporter)
|
||||
assert(false, 'reporter message')
|
||||
expect(reporter).toHaveBeenCalledWith('reporter message')
|
||||
})
|
||||
|
||||
it('does not call reporter when condition is true', () => {
|
||||
vi.stubEnv('DEV', false)
|
||||
const reporter = vi.fn()
|
||||
setAssertReporter(reporter)
|
||||
assert(true, 'no call')
|
||||
expect(reporter).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('swallows reporter exceptions in non-DEV mode', () => {
|
||||
vi.stubEnv('DEV', false)
|
||||
const reporter = vi.fn(() => {
|
||||
throw new Error('reporter blew up')
|
||||
})
|
||||
setAssertReporter(reporter)
|
||||
expect(() => assert(false, 'safe under reporter failure')).not.toThrow()
|
||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||
'[Assertion failed]: safe under reporter failure'
|
||||
)
|
||||
expect(consoleErrorSpy).toHaveBeenCalledWith(
|
||||
'[Assertion reporter failed]',
|
||||
expect.any(Error)
|
||||
)
|
||||
})
|
||||
})
|
||||
@@ -1,35 +0,0 @@
|
||||
type AssertReporter = (message: string) => void
|
||||
|
||||
let reporter: AssertReporter | null = null
|
||||
|
||||
/**
|
||||
* Register a reporter for assertion failures in non-DEV environments.
|
||||
* Called once at app startup by platform/ or higher layers to wire in
|
||||
* Sentry, toast notifications, etc.
|
||||
*/
|
||||
export function setAssertReporter(fn: AssertReporter): void {
|
||||
reporter = fn
|
||||
}
|
||||
|
||||
/**
|
||||
* Centralized invariant assertion.
|
||||
*
|
||||
* - Always: console.error
|
||||
* - DEV: throws (surfaces bugs immediately)
|
||||
* - Otherwise: delegates to registered reporter (Sentry, toast, etc.)
|
||||
*/
|
||||
export function assert(condition: boolean, message: string): void {
|
||||
if (condition) return
|
||||
|
||||
console.error(`[Assertion failed]: ${message}`)
|
||||
|
||||
if (import.meta.env.DEV) {
|
||||
throw new Error(`[Assertion failed]: ${message}`)
|
||||
}
|
||||
|
||||
try {
|
||||
reporter?.(message)
|
||||
} catch (error) {
|
||||
console.error('[Assertion reporter failed]', error)
|
||||
}
|
||||
}
|
||||
@@ -98,6 +98,7 @@ import { useQueueFeatureFlags } from '@/composables/queue/useQueueFeatureFlags'
|
||||
import { buildTooltipConfig } from '@/composables/useTooltipConfig'
|
||||
import { isCloud } from '@/platform/distribution/types'
|
||||
import { useSettingStore } from '@/platform/settings/settingStore'
|
||||
import { useSurveyFeatureTracking } from '@/platform/surveys/useSurveyFeatureTracking'
|
||||
import { useSidebarTabStore } from '@/stores/workspace/sidebarTabStore'
|
||||
|
||||
const emit = defineEmits<{
|
||||
@@ -107,6 +108,7 @@ const emit = defineEmits<{
|
||||
const { t } = useI18n()
|
||||
const settingStore = useSettingStore()
|
||||
const sidebarTabStore = useSidebarTabStore()
|
||||
const { trackFeatureUsed } = useSurveyFeatureTracking('queue-progress-overlay')
|
||||
|
||||
const moreTooltipConfig = computed(() => buildTooltipConfig(t('g.more')))
|
||||
const { isQueuePanelV2Enabled, isRunProgressBarEnabled } =
|
||||
@@ -119,6 +121,7 @@ const onClearHistoryFromMenu = (close: () => void) => {
|
||||
}
|
||||
|
||||
const onToggleDockedJobHistory = async (close: () => void) => {
|
||||
trackFeatureUsed()
|
||||
close()
|
||||
|
||||
try {
|
||||
@@ -138,6 +141,7 @@ const onToggleDockedJobHistory = async (close: () => void) => {
|
||||
}
|
||||
|
||||
const onToggleRunProgressBar = async () => {
|
||||
trackFeatureUsed()
|
||||
await settingStore.set(
|
||||
'Comfy.Queue.ShowRunProgressBar',
|
||||
!isRunProgressBarEnabled.value
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
role="status"
|
||||
aria-live="polite"
|
||||
aria-atomic="true"
|
||||
data-testid="queue-notification-banner"
|
||||
>
|
||||
<QueueNotificationBanner :notification="currentNotification" />
|
||||
</div>
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
:selected-sort-mode="selectedSortMode"
|
||||
:has-failed-jobs="hasFailedJobs"
|
||||
@show-assets="$emit('showAssets')"
|
||||
@update:selected-job-tab="$emit('update:selectedJobTab', $event)"
|
||||
@update:selected-job-tab="onUpdateSelectedJobTab"
|
||||
@update:selected-workflow-filter="
|
||||
$emit('update:selectedWorkflowFilter', $event)
|
||||
"
|
||||
@@ -50,6 +50,7 @@ import type {
|
||||
import type { MenuEntry } from '@/composables/queue/useJobMenu'
|
||||
import { useJobMenu } from '@/composables/queue/useJobMenu'
|
||||
import { useErrorHandling } from '@/composables/useErrorHandling'
|
||||
import { useSurveyFeatureTracking } from '@/platform/surveys/useSurveyFeatureTracking'
|
||||
|
||||
import QueueOverlayHeader from './QueueOverlayHeader.vue'
|
||||
import JobContextMenu from './job/JobContextMenu.vue'
|
||||
@@ -81,6 +82,7 @@ const emit = defineEmits<{
|
||||
const currentMenuItem = ref<JobListItem | null>(null)
|
||||
const jobContextMenuRef = ref<InstanceType<typeof JobContextMenu> | null>(null)
|
||||
const { wrapWithErrorHandlingAsync } = useErrorHandling()
|
||||
const { trackFeatureUsed } = useSurveyFeatureTracking('queue-progress-overlay')
|
||||
|
||||
const { jobMenuEntries } = useJobMenu(
|
||||
() => currentMenuItem.value,
|
||||
@@ -95,6 +97,11 @@ const onDeleteItemEvent = (item: JobListItem) => {
|
||||
emit('deleteItem', item)
|
||||
}
|
||||
|
||||
const onUpdateSelectedJobTab = (value: JobTab) => {
|
||||
trackFeatureUsed()
|
||||
emit('update:selectedJobTab', value)
|
||||
}
|
||||
|
||||
const onMenuItem = (item: JobListItem, event: Event) => {
|
||||
currentMenuItem.value = item
|
||||
jobContextMenuRef.value?.open(event)
|
||||
|
||||
@@ -66,6 +66,7 @@ import { useResultGallery } from '@/composables/queue/useResultGallery'
|
||||
import { useErrorHandling } from '@/composables/useErrorHandling'
|
||||
import { useAssetSelectionStore } from '@/platform/assets/composables/useAssetSelectionStore'
|
||||
import { isCloud } from '@/platform/distribution/types'
|
||||
import { useSurveyFeatureTracking } from '@/platform/surveys/useSurveyFeatureTracking'
|
||||
import { api } from '@/scripts/api'
|
||||
import { useAssetsStore } from '@/stores/assetsStore'
|
||||
import { useCommandStore } from '@/stores/commandStore'
|
||||
@@ -93,6 +94,7 @@ const assetsStore = useAssetsStore()
|
||||
const assetSelectionStore = useAssetSelectionStore()
|
||||
const { showQueueClearHistoryDialog } = useQueueClearHistoryDialog()
|
||||
const { wrapWithErrorHandlingAsync } = useErrorHandling()
|
||||
const { trackFeatureUsed } = useSurveyFeatureTracking('queue-progress-overlay')
|
||||
|
||||
const {
|
||||
totalPercentFormatted,
|
||||
@@ -188,6 +190,7 @@ const {
|
||||
const displayedJobGroups = computed(() => groupedJobItems.value)
|
||||
|
||||
const onCancelItem = wrapWithErrorHandlingAsync(async (item: JobListItem) => {
|
||||
trackFeatureUsed()
|
||||
const jobId = item.taskRef?.jobId
|
||||
if (!jobId) return
|
||||
|
||||
@@ -209,6 +212,7 @@ const onCancelItem = wrapWithErrorHandlingAsync(async (item: JobListItem) => {
|
||||
})
|
||||
|
||||
const onDeleteItem = wrapWithErrorHandlingAsync(async (item: JobListItem) => {
|
||||
trackFeatureUsed()
|
||||
if (!item.taskRef) return
|
||||
await queueStore.delete(item.taskRef)
|
||||
})
|
||||
@@ -224,10 +228,12 @@ const setExpanded = (expanded: boolean) => {
|
||||
}
|
||||
|
||||
const viewAllJobs = () => {
|
||||
trackFeatureUsed()
|
||||
setExpanded(true)
|
||||
}
|
||||
|
||||
const toggleAssetsSidebar = () => {
|
||||
trackFeatureUsed()
|
||||
sidebarTabStore.toggleSidebarTab('assets')
|
||||
}
|
||||
|
||||
@@ -257,12 +263,14 @@ const focusAssetInSidebar = async (item: JobListItem) => {
|
||||
|
||||
const inspectJobAsset = wrapWithErrorHandlingAsync(
|
||||
async (item: JobListItem) => {
|
||||
trackFeatureUsed()
|
||||
await openResultGallery(item)
|
||||
await focusAssetInSidebar(item)
|
||||
}
|
||||
)
|
||||
|
||||
const cancelQueuedWorkflows = wrapWithErrorHandlingAsync(async () => {
|
||||
trackFeatureUsed()
|
||||
// Capture pending jobIds before clearing
|
||||
const pendingJobIds = queueStore.pendingTasks
|
||||
.map((task) => task.jobId)
|
||||
@@ -275,6 +283,7 @@ const cancelQueuedWorkflows = wrapWithErrorHandlingAsync(async () => {
|
||||
})
|
||||
|
||||
const interruptAll = wrapWithErrorHandlingAsync(async () => {
|
||||
trackFeatureUsed()
|
||||
const tasks = queueStore.runningTasks
|
||||
const jobIds = tasks
|
||||
.map((task) => task.jobId)
|
||||
@@ -298,6 +307,7 @@ const interruptAll = wrapWithErrorHandlingAsync(async () => {
|
||||
})
|
||||
|
||||
const onClearHistoryFromMenu = () => {
|
||||
trackFeatureUsed()
|
||||
showQueueClearHistoryDialog()
|
||||
}
|
||||
</script>
|
||||
|
||||
@@ -122,6 +122,7 @@ import Button from '@/components/ui/button/Button.vue'
|
||||
import { jobSortModes } from '@/composables/queue/useJobList'
|
||||
import type { JobSortMode } from '@/composables/queue/useJobList'
|
||||
import { buildTooltipConfig } from '@/composables/useTooltipConfig'
|
||||
import { useSurveyFeatureTracking } from '@/platform/surveys/useSurveyFeatureTracking'
|
||||
|
||||
const {
|
||||
hideShowAssetsAction = false,
|
||||
@@ -147,6 +148,7 @@ const emit = defineEmits<{
|
||||
}>()
|
||||
|
||||
const { t } = useI18n()
|
||||
const { trackFeatureUsed } = useSurveyFeatureTracking('queue-progress-overlay')
|
||||
|
||||
const filterTooltipConfig = computed(() =>
|
||||
buildTooltipConfig(t('sideToolbar.queueProgressOverlay.filterBy'))
|
||||
@@ -170,6 +172,7 @@ const onSelectWorkflowFilter = (
|
||||
value: 'all' | 'current',
|
||||
close: () => void
|
||||
) => {
|
||||
trackFeatureUsed()
|
||||
selectWorkflowFilter(value)
|
||||
close()
|
||||
}
|
||||
@@ -179,6 +182,7 @@ const selectSortMode = (value: JobSortMode) => {
|
||||
}
|
||||
|
||||
const onSelectSortMode = (value: JobSortMode, close: () => void) => {
|
||||
trackFeatureUsed()
|
||||
selectSortMode(value)
|
||||
close()
|
||||
}
|
||||
|
||||
@@ -2,15 +2,16 @@
|
||||
<SidebarTabTemplate :title="$t('queue.jobHistory')">
|
||||
<template #alt-title>
|
||||
<div class="ml-auto flex shrink-0 items-center">
|
||||
<JobHistoryActionsMenu @clear-history="showQueueClearHistoryDialog" />
|
||||
<JobHistoryActionsMenu @clear-history="onClearHistory" />
|
||||
</div>
|
||||
</template>
|
||||
<template #header>
|
||||
<div class="flex flex-col gap-2 pb-1">
|
||||
<div class="px-3 py-2">
|
||||
<JobFilterTabs
|
||||
v-model:selected-job-tab="selectedJobTab"
|
||||
:selected-job-tab="selectedJobTab"
|
||||
:has-failed-jobs="hasFailedJobs"
|
||||
@update:selected-job-tab="onUpdateSelectedJobTab"
|
||||
/>
|
||||
</div>
|
||||
<JobFilterActions
|
||||
@@ -81,13 +82,14 @@ import JobHistoryActionsMenu from '@/components/queue/JobHistoryActionsMenu.vue'
|
||||
import type { MenuEntry } from '@/composables/queue/useJobMenu'
|
||||
import { useJobMenu } from '@/composables/queue/useJobMenu'
|
||||
import { useJobList } from '@/composables/queue/useJobList'
|
||||
import type { JobListItem } from '@/composables/queue/useJobList'
|
||||
import type { JobListItem, JobTab } from '@/composables/queue/useJobList'
|
||||
import { useQueueClearHistoryDialog } from '@/composables/queue/useQueueClearHistoryDialog'
|
||||
import { useResultGallery } from '@/composables/queue/useResultGallery'
|
||||
import { useErrorHandling } from '@/composables/useErrorHandling'
|
||||
import SidebarTabTemplate from '@/components/sidebar/tabs/SidebarTabTemplate.vue'
|
||||
import MediaLightbox from '@/components/sidebar/tabs/queue/MediaLightbox.vue'
|
||||
import Button from '@/components/ui/button/Button.vue'
|
||||
import { useSurveyFeatureTracking } from '@/platform/surveys/useSurveyFeatureTracking'
|
||||
import { useCommandStore } from '@/stores/commandStore'
|
||||
import { useDialogStore } from '@/stores/dialogStore'
|
||||
import { useExecutionStore } from '@/stores/executionStore'
|
||||
@@ -104,6 +106,17 @@ const executionStore = useExecutionStore()
|
||||
const queueStore = useQueueStore()
|
||||
const { showQueueClearHistoryDialog } = useQueueClearHistoryDialog()
|
||||
const { wrapWithErrorHandlingAsync } = useErrorHandling()
|
||||
const { trackFeatureUsed } = useSurveyFeatureTracking('queue-progress-overlay')
|
||||
|
||||
const onClearHistory = () => {
|
||||
trackFeatureUsed()
|
||||
showQueueClearHistoryDialog()
|
||||
}
|
||||
|
||||
const onUpdateSelectedJobTab = (value: JobTab) => {
|
||||
trackFeatureUsed()
|
||||
selectedJobTab.value = value
|
||||
}
|
||||
const {
|
||||
selectedJobTab,
|
||||
selectedWorkflowFilter,
|
||||
@@ -145,6 +158,7 @@ const activeQueueSummary = computed(() => {
|
||||
})
|
||||
|
||||
const clearQueuedWorkflows = wrapWithErrorHandlingAsync(async () => {
|
||||
trackFeatureUsed()
|
||||
const pendingJobIds = queueStore.pendingTasks
|
||||
.map((task) => task.jobId)
|
||||
.filter((id): id is string => typeof id === 'string' && id.length > 0)
|
||||
@@ -160,6 +174,7 @@ const {
|
||||
} = useResultGallery(() => filteredTasks.value)
|
||||
|
||||
const onViewItem = wrapWithErrorHandlingAsync(async (item: JobListItem) => {
|
||||
trackFeatureUsed()
|
||||
const previewOutput = item.taskRef?.previewOutput
|
||||
|
||||
if (previewOutput?.is3D) {
|
||||
@@ -194,10 +209,12 @@ const { jobMenuEntries, cancelJob } = useJobMenu(
|
||||
)
|
||||
|
||||
const onCancelItem = wrapWithErrorHandlingAsync(async (item: JobListItem) => {
|
||||
trackFeatureUsed()
|
||||
await cancelJob(item)
|
||||
})
|
||||
|
||||
const onDeleteItem = wrapWithErrorHandlingAsync(async (item: JobListItem) => {
|
||||
trackFeatureUsed()
|
||||
if (!item.taskRef) return
|
||||
await queueStore.delete(item.taskRef)
|
||||
})
|
||||
|
||||
51
src/components/toast/ProgressToastItem.test.ts
Normal file
51
src/components/toast/ProgressToastItem.test.ts
Normal file
@@ -0,0 +1,51 @@
|
||||
import { render, screen } from '@testing-library/vue'
|
||||
import { describe, expect, it } from 'vitest'
|
||||
import { createI18n } from 'vue-i18n'
|
||||
|
||||
import type { AssetDownload } from '@/stores/assetDownloadStore'
|
||||
|
||||
import ProgressToastItem from './ProgressToastItem.vue'
|
||||
|
||||
const i18n = createI18n({
|
||||
legacy: false,
|
||||
locale: 'en',
|
||||
messages: {
|
||||
en: {
|
||||
progressToast: {
|
||||
finished: 'Finished',
|
||||
failed: 'Failed',
|
||||
pending: 'Pending'
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
function completedJob(): AssetDownload {
|
||||
return {
|
||||
taskId: 'task-1',
|
||||
assetId: 'asset-1',
|
||||
assetName: 'controlnet-canny.safetensors',
|
||||
bytesTotal: 100,
|
||||
bytesDownloaded: 100,
|
||||
progress: 1,
|
||||
status: 'completed',
|
||||
lastUpdate: Date.now()
|
||||
}
|
||||
}
|
||||
|
||||
describe('ProgressToastItem — completed state', () => {
|
||||
it('keeps the finished badge outside the dimmed (opacity-50) subtree', () => {
|
||||
render(ProgressToastItem, {
|
||||
props: { job: completedJob() },
|
||||
global: { plugins: [i18n] }
|
||||
})
|
||||
|
||||
const badge = screen.getByText('Finished')
|
||||
// eslint-disable-next-line testing-library/no-node-access -- verifying structural placement of opacity-50 boundary, which is the subject of this fix
|
||||
expect(badge.closest('.opacity-50')).toBeNull()
|
||||
|
||||
const assetName = screen.getByText('controlnet-canny.safetensors')
|
||||
// eslint-disable-next-line testing-library/no-node-access -- verifying structural placement of opacity-50 boundary, which is the subject of this fix
|
||||
expect(assetName.closest('.opacity-50')).not.toBeNull()
|
||||
})
|
||||
})
|
||||
@@ -22,14 +22,9 @@ const isPending = computed(() => job.status === 'created')
|
||||
|
||||
<template>
|
||||
<div
|
||||
:class="
|
||||
cn(
|
||||
'flex items-center justify-between rounded-lg bg-modal-card-background px-4 py-3',
|
||||
isCompleted && 'opacity-50'
|
||||
)
|
||||
"
|
||||
class="flex items-center justify-between rounded-lg bg-modal-card-background px-4 py-3"
|
||||
>
|
||||
<div class="min-w-0 flex-1">
|
||||
<div :class="cn('min-w-0 flex-1', isCompleted && 'opacity-50')">
|
||||
<span class="block truncate text-sm text-base-foreground">{{
|
||||
job.assetName
|
||||
}}</span>
|
||||
|
||||
@@ -102,6 +102,16 @@ function createMeshModel(name = 'TestModel'): THREE.Group {
|
||||
return group
|
||||
}
|
||||
|
||||
function createPointsModel(name = 'TestModel'): THREE.Group {
|
||||
const geometry = new THREE.BufferGeometry()
|
||||
const material = new THREE.PointsMaterial({ color: 0xff0000 })
|
||||
const points = new THREE.Points(geometry, material)
|
||||
const group = new THREE.Group()
|
||||
group.name = name
|
||||
group.add(points)
|
||||
return group
|
||||
}
|
||||
|
||||
describe('SceneModelManager', () => {
|
||||
describe('constructor', () => {
|
||||
it('initializes default state', () => {
|
||||
@@ -311,6 +321,20 @@ describe('SceneModelManager', () => {
|
||||
expect(geoDispose).toHaveBeenCalled()
|
||||
expect(matDispose).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('disposes points geometry and materials', async () => {
|
||||
const { manager } = createManager()
|
||||
const model = createPointsModel()
|
||||
const points = model.children[0] as THREE.Points
|
||||
const geoDispose = vi.spyOn(points.geometry, 'dispose')
|
||||
const matDispose = vi.spyOn(points.material as THREE.Material, 'dispose')
|
||||
|
||||
await manager.setupModel(model)
|
||||
manager.clearModel()
|
||||
|
||||
expect(geoDispose).toHaveBeenCalled()
|
||||
expect(matDispose).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('reset', () => {
|
||||
|
||||
@@ -328,7 +328,7 @@ export class SceneModelManager implements ModelManagerInterface {
|
||||
this.scene.remove(obj)
|
||||
|
||||
obj.traverse((child) => {
|
||||
if (child instanceof THREE.Mesh) {
|
||||
if (child instanceof THREE.Mesh || child instanceof THREE.Points) {
|
||||
child.geometry?.dispose()
|
||||
if (Array.isArray(child.material)) {
|
||||
child.material.forEach((material) => material.dispose())
|
||||
|
||||
@@ -2700,6 +2700,12 @@
|
||||
"placeholderUnknown": "Select media...",
|
||||
"maxSelectionReached": "Maximum selection limit reached"
|
||||
},
|
||||
"remoteCombo": {
|
||||
"loading": "Loading...",
|
||||
"loadFailed": "Failed to load options",
|
||||
"playAudioPreview": "Play audio preview",
|
||||
"pauseAudioPreview": "Pause audio preview"
|
||||
},
|
||||
"valueControl": {
|
||||
"header": {
|
||||
"prefix": "Automatically update the value",
|
||||
|
||||
18
src/main.ts
18
src/main.ts
@@ -11,7 +11,6 @@ import Tooltip from 'primevue/tooltip'
|
||||
import { createApp } from 'vue'
|
||||
import { VueFire, VueFireAuth } from 'vuefire'
|
||||
|
||||
import { setAssertReporter } from '@/base/assert'
|
||||
import { getFirebaseConfig } from '@/config/firebase'
|
||||
import {
|
||||
configValueOrDefault,
|
||||
@@ -19,8 +18,6 @@ import {
|
||||
} from '@/platform/remoteConfig/remoteConfig'
|
||||
import '@/lib/litegraph/public/css/litegraph.css'
|
||||
import router from '@/router'
|
||||
import { isDesktop, isNightly } from '@/platform/distribution/types'
|
||||
import { useToastStore } from '@/platform/updates/common/toastStore'
|
||||
import { useBootstrapStore } from '@/stores/bootstrapStore'
|
||||
|
||||
import App from './App.vue'
|
||||
@@ -84,21 +81,6 @@ Sentry.init({
|
||||
defaultIntegrations: false
|
||||
})
|
||||
})
|
||||
setAssertReporter((message) => {
|
||||
if (isDesktop) {
|
||||
Sentry.captureMessage(`[Assertion failed]: ${message}`, {
|
||||
level: 'warning'
|
||||
})
|
||||
}
|
||||
if (isNightly) {
|
||||
useToastStore().add({
|
||||
severity: 'warn',
|
||||
summary: 'Assertion failed',
|
||||
detail: message
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
app.directive('tooltip', Tooltip)
|
||||
app
|
||||
.use(router)
|
||||
|
||||
@@ -1,7 +1,12 @@
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import type { AssetItem } from '@/platform/assets/schemas/assetSchema'
|
||||
import { assetService } from '@/platform/assets/services/assetService'
|
||||
import {
|
||||
MISSING_TAG,
|
||||
assetService,
|
||||
isBlake3AssetHash,
|
||||
toBlake3AssetHash
|
||||
} from '@/platform/assets/services/assetService'
|
||||
import { api } from '@/scripts/api'
|
||||
|
||||
const mockDistributionState = vi.hoisted(() => ({ isCloud: false }))
|
||||
@@ -44,6 +49,10 @@ vi.mock('@/i18n', () => ({
|
||||
|
||||
const fetchApiMock = vi.mocked(api.fetchApi)
|
||||
|
||||
const validBlake3Hash =
|
||||
'1111111111111111111111111111111111111111111111111111111111111111'
|
||||
const validBlake3AssetHash = `blake3:${validBlake3Hash}`
|
||||
|
||||
function buildResponse(
|
||||
body: unknown,
|
||||
init: { ok?: boolean; status?: number } = {}
|
||||
@@ -180,9 +189,98 @@ describe(assetService.getAssetMetadata, () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe(isBlake3AssetHash, () => {
|
||||
it('accepts only prefixed 64-character blake3 hashes', () => {
|
||||
expect(isBlake3AssetHash(validBlake3AssetHash)).toBe(true)
|
||||
expect(isBlake3AssetHash('BLAKE3:' + validBlake3Hash.toUpperCase())).toBe(
|
||||
true
|
||||
)
|
||||
expect(isBlake3AssetHash('blake3:abc')).toBe(false)
|
||||
expect(isBlake3AssetHash(validBlake3Hash)).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe(toBlake3AssetHash, () => {
|
||||
it('normalizes 64-character blake3 hex values to asset hashes', () => {
|
||||
expect(toBlake3AssetHash(validBlake3Hash)).toBe(validBlake3AssetHash)
|
||||
expect(toBlake3AssetHash('abc')).toBeNull()
|
||||
expect(toBlake3AssetHash(undefined)).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
describe(assetService.uploadAssetFromUrl, () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
assetService.invalidateInputAssetsIncludingPublic()
|
||||
})
|
||||
|
||||
it('does not invalidate cached input assets when the upload response is invalid', async () => {
|
||||
const staleAssets = [validAsset({ id: 'stale-input', tags: ['input'] })]
|
||||
const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
fetchApiMock
|
||||
.mockResolvedValueOnce(buildResponse({ assets: staleAssets }))
|
||||
.mockResolvedValueOnce(buildResponse({ id: 'missing-name' }))
|
||||
|
||||
await assetService.getInputAssetsIncludingPublic()
|
||||
await expect(
|
||||
assetService.uploadAssetFromUrl({
|
||||
url: 'https://example.com/input.png',
|
||||
name: 'input.png',
|
||||
tags: ['input']
|
||||
})
|
||||
).rejects.toThrow('Failed to upload asset')
|
||||
const cached = await assetService.getInputAssetsIncludingPublic()
|
||||
|
||||
expect(cached).toEqual(staleAssets)
|
||||
expect(fetchApiMock).toHaveBeenCalledTimes(2)
|
||||
consoleSpy.mockRestore()
|
||||
})
|
||||
|
||||
it('requires upload responses to include created_new', async () => {
|
||||
const staleAssets = [validAsset({ id: 'stale-input', tags: ['input'] })]
|
||||
const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
fetchApiMock
|
||||
.mockResolvedValueOnce(buildResponse({ assets: staleAssets }))
|
||||
.mockResolvedValueOnce(
|
||||
buildResponse(validAsset({ id: 'uploaded-input', tags: ['input'] }))
|
||||
)
|
||||
|
||||
await assetService.getInputAssetsIncludingPublic()
|
||||
await expect(
|
||||
assetService.uploadAssetFromUrl({
|
||||
url: 'https://example.com/input.png',
|
||||
name: 'input.png',
|
||||
tags: ['input']
|
||||
})
|
||||
).rejects.toThrow('Failed to upload asset')
|
||||
const cached = await assetService.getInputAssetsIncludingPublic()
|
||||
|
||||
expect(cached).toEqual(staleAssets)
|
||||
expect(fetchApiMock).toHaveBeenCalledTimes(2)
|
||||
consoleSpy.mockRestore()
|
||||
})
|
||||
|
||||
it('returns validated upload responses with created_new', async () => {
|
||||
const uploadedAsset = {
|
||||
...validAsset({ id: 'uploaded-input', tags: ['input'] }),
|
||||
created_new: true
|
||||
}
|
||||
fetchApiMock.mockResolvedValueOnce(buildResponse(uploadedAsset))
|
||||
|
||||
await expect(
|
||||
assetService.uploadAssetFromUrl({
|
||||
url: 'https://example.com/input.png',
|
||||
name: 'input.png',
|
||||
tags: ['input']
|
||||
})
|
||||
).resolves.toEqual(uploadedAsset)
|
||||
})
|
||||
})
|
||||
|
||||
describe(assetService.uploadAssetFromBase64, () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
assetService.invalidateInputAssetsIncludingPublic()
|
||||
})
|
||||
|
||||
it('throws before calling the network when data is not a data URL', async () => {
|
||||
@@ -195,6 +293,63 @@ describe(assetService.uploadAssetFromBase64, () => {
|
||||
|
||||
expect(fetchApiMock).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('does not invalidate cached input assets when the upload response is invalid', async () => {
|
||||
const staleAssets = [validAsset({ id: 'stale-input', tags: ['input'] })]
|
||||
const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const fetchSpy = vi
|
||||
.spyOn(globalThis, 'fetch')
|
||||
.mockResolvedValueOnce(new Response('hello'))
|
||||
fetchApiMock
|
||||
.mockResolvedValueOnce(buildResponse({ assets: staleAssets }))
|
||||
.mockResolvedValueOnce(buildResponse({ id: 'missing-name' }))
|
||||
|
||||
await assetService.getInputAssetsIncludingPublic()
|
||||
await expect(
|
||||
assetService.uploadAssetFromBase64({
|
||||
data: 'data:text/plain;base64,aGVsbG8=',
|
||||
name: 'input.txt',
|
||||
tags: ['input']
|
||||
})
|
||||
).rejects.toThrow('Failed to upload asset')
|
||||
const cached = await assetService.getInputAssetsIncludingPublic()
|
||||
|
||||
expect(cached).toEqual(staleAssets)
|
||||
expect(fetchApiMock).toHaveBeenCalledTimes(2)
|
||||
fetchSpy.mockRestore()
|
||||
consoleSpy.mockRestore()
|
||||
})
|
||||
|
||||
it('rejects upload responses with a non-boolean created_new', async () => {
|
||||
const staleAssets = [validAsset({ id: 'stale-input', tags: ['input'] })]
|
||||
const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const fetchSpy = vi
|
||||
.spyOn(globalThis, 'fetch')
|
||||
.mockResolvedValueOnce(new Response('hello'))
|
||||
fetchApiMock
|
||||
.mockResolvedValueOnce(buildResponse({ assets: staleAssets }))
|
||||
.mockResolvedValueOnce(
|
||||
buildResponse({
|
||||
...validAsset({ id: 'uploaded-input', tags: ['input'] }),
|
||||
created_new: 'true'
|
||||
})
|
||||
)
|
||||
|
||||
await assetService.getInputAssetsIncludingPublic()
|
||||
await expect(
|
||||
assetService.uploadAssetFromBase64({
|
||||
data: 'data:text/plain;base64,aGVsbG8=',
|
||||
name: 'input.txt',
|
||||
tags: ['input']
|
||||
})
|
||||
).rejects.toThrow('Failed to upload asset')
|
||||
const cached = await assetService.getInputAssetsIncludingPublic()
|
||||
|
||||
expect(cached).toEqual(staleAssets)
|
||||
expect(fetchApiMock).toHaveBeenCalledTimes(2)
|
||||
fetchSpy.mockRestore()
|
||||
consoleSpy.mockRestore()
|
||||
})
|
||||
})
|
||||
|
||||
describe(assetService.uploadAssetAsync, () => {
|
||||
@@ -354,3 +509,391 @@ describe(assetService.getAssetsByTag, () => {
|
||||
expect(params.get('include_public')).toBe('true')
|
||||
})
|
||||
})
|
||||
|
||||
describe(assetService.getAllAssetsByTag, () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
it('paginates tagged asset requests with include_public=true', async () => {
|
||||
fetchApiMock
|
||||
.mockResolvedValueOnce(
|
||||
buildResponse({
|
||||
assets: [
|
||||
validAsset({ id: 'a', tags: ['input'] }),
|
||||
validAsset({ id: 'b', tags: ['input'] })
|
||||
]
|
||||
})
|
||||
)
|
||||
.mockResolvedValueOnce(
|
||||
buildResponse({
|
||||
assets: [validAsset({ id: 'c', tags: ['input'] })]
|
||||
})
|
||||
)
|
||||
|
||||
const assets = await assetService.getAllAssetsByTag('input', true, {
|
||||
limit: 2
|
||||
})
|
||||
|
||||
expect(assets.map((a) => a.id)).toEqual(['a', 'b', 'c'])
|
||||
|
||||
const firstUrl = fetchApiMock.mock.calls[0]?.[0] as string
|
||||
const firstParams = new URL(firstUrl, 'http://localhost').searchParams
|
||||
expect(firstParams.get('include_public')).toBe('true')
|
||||
expect(firstParams.get('limit')).toBe('2')
|
||||
expect(firstParams.has('offset')).toBe(false)
|
||||
|
||||
const secondUrl = fetchApiMock.mock.calls[1]?.[0] as string
|
||||
const secondParams = new URL(secondUrl, 'http://localhost').searchParams
|
||||
expect(secondParams.get('include_public')).toBe('true')
|
||||
expect(secondParams.get('limit')).toBe('2')
|
||||
expect(secondParams.get('offset')).toBe('2')
|
||||
})
|
||||
|
||||
it('paginates from raw response size before filtering missing-tagged assets', async () => {
|
||||
fetchApiMock
|
||||
.mockResolvedValueOnce(
|
||||
buildResponse({
|
||||
assets: [
|
||||
validAsset({ id: 'visible', tags: ['input'] }),
|
||||
validAsset({ id: 'hidden', tags: ['input', MISSING_TAG] })
|
||||
]
|
||||
})
|
||||
)
|
||||
.mockResolvedValueOnce(
|
||||
buildResponse({
|
||||
assets: [validAsset({ id: 'later-public', tags: ['input'] })]
|
||||
})
|
||||
)
|
||||
|
||||
const assets = await assetService.getAllAssetsByTag('input', true, {
|
||||
limit: 2
|
||||
})
|
||||
|
||||
expect(assets.map((a) => a.id)).toEqual(['visible', 'later-public'])
|
||||
expect(fetchApiMock).toHaveBeenCalledTimes(2)
|
||||
|
||||
const secondUrl = fetchApiMock.mock.calls[1]?.[0]
|
||||
if (typeof secondUrl !== 'string') {
|
||||
throw new Error('Expected a second asset request URL')
|
||||
}
|
||||
const secondParams = new URL(secondUrl, 'http://localhost').searchParams
|
||||
expect(secondParams.get('offset')).toBe('2')
|
||||
})
|
||||
|
||||
it('honors has_more when walking tagged asset pages', async () => {
|
||||
fetchApiMock
|
||||
.mockResolvedValueOnce(
|
||||
buildResponse({
|
||||
assets: [
|
||||
validAsset({ id: 'first', tags: ['input'] }),
|
||||
validAsset({ id: 'second', tags: ['input'] })
|
||||
],
|
||||
has_more: true
|
||||
})
|
||||
)
|
||||
.mockResolvedValueOnce(
|
||||
buildResponse({
|
||||
assets: [validAsset({ id: 'later-public', tags: ['input'] })],
|
||||
has_more: false
|
||||
})
|
||||
)
|
||||
|
||||
const assets = await assetService.getAllAssetsByTag('input', true, {
|
||||
limit: 3
|
||||
})
|
||||
|
||||
expect(assets.map((a) => a.id)).toEqual(['first', 'second', 'later-public'])
|
||||
expect(fetchApiMock).toHaveBeenCalledTimes(2)
|
||||
|
||||
const secondUrl = fetchApiMock.mock.calls[1]?.[0]
|
||||
if (typeof secondUrl !== 'string') {
|
||||
throw new Error('Expected a second asset request URL')
|
||||
}
|
||||
const secondParams = new URL(secondUrl, 'http://localhost').searchParams
|
||||
expect(secondParams.get('offset')).toBe('2')
|
||||
})
|
||||
|
||||
it('passes abort signals through paginated requests', async () => {
|
||||
const controller = new AbortController()
|
||||
fetchApiMock.mockResolvedValueOnce(
|
||||
buildResponse({
|
||||
assets: [validAsset({ id: 'a', tags: ['input'] })]
|
||||
})
|
||||
)
|
||||
|
||||
await assetService.getAllAssetsByTag('input', true, {
|
||||
limit: 2,
|
||||
signal: controller.signal
|
||||
})
|
||||
|
||||
expect(fetchApiMock).toHaveBeenCalledWith(expect.any(String), {
|
||||
signal: controller.signal
|
||||
})
|
||||
})
|
||||
|
||||
it('stops pagination when aborted between pages', async () => {
|
||||
const controller = new AbortController()
|
||||
fetchApiMock.mockImplementationOnce(async () => {
|
||||
controller.abort()
|
||||
return buildResponse({
|
||||
assets: [
|
||||
validAsset({ id: 'a', tags: ['input'] }),
|
||||
validAsset({ id: 'b', tags: ['input'] })
|
||||
]
|
||||
})
|
||||
})
|
||||
|
||||
await expect(
|
||||
assetService.getAllAssetsByTag('input', true, {
|
||||
limit: 2,
|
||||
signal: controller.signal
|
||||
})
|
||||
).rejects.toMatchObject({ name: 'AbortError' })
|
||||
|
||||
expect(fetchApiMock).toHaveBeenCalledOnce()
|
||||
})
|
||||
})
|
||||
|
||||
describe(assetService.getInputAssetsIncludingPublic, () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
assetService.invalidateInputAssetsIncludingPublic()
|
||||
})
|
||||
|
||||
it('loads input assets with public assets included and reuses the cache', async () => {
|
||||
const assets = [
|
||||
validAsset({ id: 'user-input', tags: ['input'] }),
|
||||
validAsset({ id: 'public-input', tags: ['input'], is_immutable: true })
|
||||
]
|
||||
fetchApiMock.mockResolvedValueOnce(buildResponse({ assets }))
|
||||
|
||||
const first = await assetService.getInputAssetsIncludingPublic()
|
||||
const second = await assetService.getInputAssetsIncludingPublic()
|
||||
|
||||
expect(first).toEqual(assets)
|
||||
expect(second).toBe(first)
|
||||
expect(fetchApiMock).toHaveBeenCalledOnce()
|
||||
|
||||
const requestedUrl = fetchApiMock.mock.calls[0]?.[0] as string
|
||||
const params = new URL(requestedUrl, 'http://localhost').searchParams
|
||||
expect(params.get('include_public')).toBe('true')
|
||||
expect(params.get('limit')).toBe('500')
|
||||
})
|
||||
|
||||
it('fetches fresh input assets after explicit invalidation', async () => {
|
||||
const staleAssets = [validAsset({ id: 'stale-input', tags: ['input'] })]
|
||||
const freshAssets = [validAsset({ id: 'fresh-input', tags: ['input'] })]
|
||||
fetchApiMock
|
||||
.mockResolvedValueOnce(buildResponse({ assets: staleAssets }))
|
||||
.mockResolvedValueOnce(buildResponse({ assets: freshAssets }))
|
||||
|
||||
await assetService.getInputAssetsIncludingPublic()
|
||||
assetService.invalidateInputAssetsIncludingPublic()
|
||||
const refreshed = await assetService.getInputAssetsIncludingPublic()
|
||||
|
||||
expect(refreshed).toEqual(freshAssets)
|
||||
expect(fetchApiMock).toHaveBeenCalledTimes(2)
|
||||
})
|
||||
|
||||
it('does not let one caller abort the shared input asset load for other callers', async () => {
|
||||
const firstController = new AbortController()
|
||||
const secondController = new AbortController()
|
||||
const assets = [validAsset({ id: 'public-input', tags: ['input'] })]
|
||||
let resolveResponse!: (response: Response) => void
|
||||
let serviceSignal: AbortSignal | undefined
|
||||
fetchApiMock.mockImplementationOnce(async (_url, options) => {
|
||||
serviceSignal = options?.signal ?? undefined
|
||||
return await new Promise<Response>((resolve) => {
|
||||
resolveResponse = resolve
|
||||
})
|
||||
})
|
||||
|
||||
const first = assetService.getInputAssetsIncludingPublic(
|
||||
firstController.signal
|
||||
)
|
||||
const second = assetService.getInputAssetsIncludingPublic(
|
||||
secondController.signal
|
||||
)
|
||||
firstController.abort()
|
||||
|
||||
await expect(first).rejects.toMatchObject({ name: 'AbortError' })
|
||||
expect(serviceSignal).toBeUndefined()
|
||||
|
||||
resolveResponse(buildResponse({ assets }))
|
||||
|
||||
await expect(second).resolves.toEqual(assets)
|
||||
expect(fetchApiMock).toHaveBeenCalledOnce()
|
||||
})
|
||||
|
||||
it('keeps the shared input asset load alive after all callers abort', async () => {
|
||||
const firstController = new AbortController()
|
||||
const secondController = new AbortController()
|
||||
const assets = [validAsset({ id: 'public-input', tags: ['input'] })]
|
||||
let resolveResponse!: (response: Response) => void
|
||||
fetchApiMock.mockImplementationOnce(
|
||||
async () =>
|
||||
await new Promise<Response>((resolve) => {
|
||||
resolveResponse = resolve
|
||||
})
|
||||
)
|
||||
|
||||
const first = assetService.getInputAssetsIncludingPublic(
|
||||
firstController.signal
|
||||
)
|
||||
const second = assetService.getInputAssetsIncludingPublic(
|
||||
secondController.signal
|
||||
)
|
||||
firstController.abort()
|
||||
secondController.abort()
|
||||
|
||||
await expect(first).rejects.toMatchObject({ name: 'AbortError' })
|
||||
await expect(second).rejects.toMatchObject({ name: 'AbortError' })
|
||||
|
||||
resolveResponse(buildResponse({ assets }))
|
||||
await Promise.resolve()
|
||||
|
||||
await expect(assetService.getInputAssetsIncludingPublic()).resolves.toEqual(
|
||||
assets
|
||||
)
|
||||
expect(fetchApiMock).toHaveBeenCalledOnce()
|
||||
})
|
||||
|
||||
it('does not abort in-flight input asset loads when invalidated', async () => {
|
||||
const assets = [validAsset({ id: 'stale-input', tags: ['input'] })]
|
||||
const freshAssets = [validAsset({ id: 'fresh-input', tags: ['input'] })]
|
||||
let resolveResponse!: (response: Response) => void
|
||||
fetchApiMock
|
||||
.mockImplementationOnce(
|
||||
async () =>
|
||||
await new Promise<Response>((resolve) => {
|
||||
resolveResponse = resolve
|
||||
})
|
||||
)
|
||||
.mockResolvedValueOnce(buildResponse({ assets: freshAssets }))
|
||||
|
||||
const inFlight = assetService.getInputAssetsIncludingPublic()
|
||||
assetService.invalidateInputAssetsIncludingPublic()
|
||||
|
||||
resolveResponse(buildResponse({ assets }))
|
||||
|
||||
await expect(inFlight).resolves.toEqual(assets)
|
||||
await expect(assetService.getInputAssetsIncludingPublic()).resolves.toEqual(
|
||||
freshAssets
|
||||
)
|
||||
expect(fetchApiMock).toHaveBeenCalledTimes(2)
|
||||
})
|
||||
|
||||
it('invalidates cached input assets after deleting an asset', async () => {
|
||||
const staleAssets = [validAsset({ id: 'stale-input', tags: ['input'] })]
|
||||
const freshAssets = [validAsset({ id: 'fresh-input', tags: ['input'] })]
|
||||
fetchApiMock
|
||||
.mockResolvedValueOnce(buildResponse({ assets: staleAssets }))
|
||||
.mockResolvedValueOnce(buildResponse(null))
|
||||
.mockResolvedValueOnce(buildResponse({ assets: freshAssets }))
|
||||
|
||||
await assetService.getInputAssetsIncludingPublic()
|
||||
await assetService.deleteAsset('stale-input')
|
||||
const refreshed = await assetService.getInputAssetsIncludingPublic()
|
||||
|
||||
expect(refreshed).toEqual(freshAssets)
|
||||
expect(fetchApiMock).toHaveBeenCalledTimes(3)
|
||||
expect(fetchApiMock.mock.calls[1]).toEqual([
|
||||
'/assets/stale-input',
|
||||
expect.objectContaining({ method: 'DELETE' })
|
||||
])
|
||||
})
|
||||
|
||||
it('invalidates cached input assets after an input asset upload', async () => {
|
||||
const staleAssets = [validAsset({ id: 'stale-input', tags: ['input'] })]
|
||||
const uploadedAsset = validAsset({ id: 'uploaded-input', tags: ['input'] })
|
||||
const freshAssets = [uploadedAsset]
|
||||
fetchApiMock
|
||||
.mockResolvedValueOnce(buildResponse({ assets: staleAssets }))
|
||||
.mockResolvedValueOnce(buildResponse(uploadedAsset))
|
||||
.mockResolvedValueOnce(buildResponse({ assets: freshAssets }))
|
||||
|
||||
await assetService.getInputAssetsIncludingPublic()
|
||||
await assetService.uploadAssetAsync({
|
||||
source_url: 'https://example.com/input.png',
|
||||
tags: ['input']
|
||||
})
|
||||
const refreshed = await assetService.getInputAssetsIncludingPublic()
|
||||
|
||||
expect(refreshed).toEqual(freshAssets)
|
||||
expect(fetchApiMock).toHaveBeenCalledTimes(3)
|
||||
})
|
||||
|
||||
it('does not invalidate cached input assets for pending async input uploads', async () => {
|
||||
const staleAssets = [validAsset({ id: 'stale-input', tags: ['input'] })]
|
||||
fetchApiMock
|
||||
.mockResolvedValueOnce(buildResponse({ assets: staleAssets }))
|
||||
.mockResolvedValueOnce(
|
||||
buildResponse(
|
||||
{ task_id: 'task-1', status: 'running' },
|
||||
{ ok: true, status: 202 }
|
||||
)
|
||||
)
|
||||
|
||||
await assetService.getInputAssetsIncludingPublic()
|
||||
await assetService.uploadAssetAsync({
|
||||
source_url: 'https://example.com/input.png',
|
||||
tags: ['input']
|
||||
})
|
||||
const cached = await assetService.getInputAssetsIncludingPublic()
|
||||
|
||||
expect(cached).toEqual(staleAssets)
|
||||
expect(fetchApiMock).toHaveBeenCalledTimes(2)
|
||||
})
|
||||
|
||||
it('does not invalidate cached input assets for non-input uploads', async () => {
|
||||
const staleAssets = [validAsset({ id: 'stale-input', tags: ['input'] })]
|
||||
fetchApiMock
|
||||
.mockResolvedValueOnce(buildResponse({ assets: staleAssets }))
|
||||
.mockResolvedValueOnce(buildResponse(validAsset({ tags: ['models'] })))
|
||||
|
||||
await assetService.getInputAssetsIncludingPublic()
|
||||
await assetService.uploadAssetAsync({
|
||||
source_url: 'https://example.com/model.safetensors',
|
||||
tags: ['models']
|
||||
})
|
||||
const cached = await assetService.getInputAssetsIncludingPublic()
|
||||
|
||||
expect(cached).toEqual(staleAssets)
|
||||
expect(fetchApiMock).toHaveBeenCalledTimes(2)
|
||||
})
|
||||
})
|
||||
|
||||
describe(assetService.checkAssetHash, () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
it.each([
|
||||
[200, 'exists'],
|
||||
[404, 'missing'],
|
||||
[400, 'invalid']
|
||||
] as const)('maps %s responses to %s', async (status, expected) => {
|
||||
const hash =
|
||||
'blake3:1111111111111111111111111111111111111111111111111111111111111111'
|
||||
fetchApiMock.mockResolvedValueOnce(buildResponse(null, { status }))
|
||||
|
||||
await expect(assetService.checkAssetHash(hash)).resolves.toBe(expected)
|
||||
|
||||
expect(fetchApiMock).toHaveBeenCalledWith(
|
||||
`/assets/hash/${encodeURIComponent(hash)}`,
|
||||
{
|
||||
method: 'HEAD',
|
||||
signal: undefined
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('throws for unexpected responses', async () => {
|
||||
fetchApiMock.mockResolvedValueOnce(buildResponse(null, { status: 500 }))
|
||||
|
||||
await expect(assetService.checkAssetHash('blake3:abc')).rejects.toThrow(
|
||||
'Unexpected asset hash check status: 500'
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { fromZodError } from 'zod-validation-error'
|
||||
import { z } from 'zod'
|
||||
|
||||
import { st } from '@/i18n'
|
||||
|
||||
@@ -29,9 +30,14 @@ export interface PaginationOptions {
|
||||
offset?: number
|
||||
}
|
||||
|
||||
interface AssetPaginationOptions extends PaginationOptions {
|
||||
signal?: AbortSignal
|
||||
}
|
||||
|
||||
interface AssetRequestOptions extends PaginationOptions {
|
||||
includeTags: string[]
|
||||
includePublic?: boolean
|
||||
signal?: AbortSignal
|
||||
}
|
||||
|
||||
interface AssetExportOptions {
|
||||
@@ -170,10 +176,61 @@ const ASSETS_DOWNLOAD_ENDPOINT = '/assets/download'
|
||||
const ASSETS_EXPORT_ENDPOINT = '/assets/export'
|
||||
const EXPERIMENTAL_WARNING = `EXPERIMENTAL: If you are seeing this please make sure "Comfy.Assets.UseAssetAPI" is set to "false" in your ComfyUI Settings.\n`
|
||||
const DEFAULT_LIMIT = 500
|
||||
const INPUT_ASSETS_WITH_PUBLIC_LIMIT = 500
|
||||
|
||||
export const MODELS_TAG = 'models'
|
||||
/** Asset tag used by the backend for placeholder records that are not installed. */
|
||||
export const MISSING_TAG = 'missing'
|
||||
|
||||
/** Result of a HEAD lookup against an exact asset hash. */
|
||||
export type AssetHashStatus = 'exists' | 'missing' | 'invalid'
|
||||
|
||||
const BLAKE3_ASSET_HASH_PATTERN = /^blake3:[0-9a-f]{64}$/i
|
||||
const BLAKE3_HEX_PATTERN = /^[0-9a-f]{64}$/i
|
||||
const uploadedAssetResponseSchema = assetItemSchema.extend({
|
||||
created_new: z.boolean()
|
||||
})
|
||||
|
||||
/** Returns true for a prefixed BLAKE3 asset hash: `blake3:<64 hex>`. */
|
||||
export function isBlake3AssetHash(value: string): boolean {
|
||||
return BLAKE3_ASSET_HASH_PATTERN.test(value)
|
||||
}
|
||||
|
||||
/** Converts a raw 64-character BLAKE3 hex digest into an asset hash. */
|
||||
export function toBlake3AssetHash(hash: string | undefined): string | null {
|
||||
if (!hash || !BLAKE3_HEX_PATTERN.test(hash)) return null
|
||||
return `blake3:${hash}`
|
||||
}
|
||||
|
||||
function createAbortError(): DOMException {
|
||||
return new DOMException('Aborted', 'AbortError')
|
||||
}
|
||||
|
||||
function throwIfAborted(signal?: AbortSignal): void {
|
||||
if (signal?.aborted) throw createAbortError()
|
||||
}
|
||||
|
||||
async function withCallerAbort<T>(
|
||||
promise: Promise<T>,
|
||||
signal?: AbortSignal
|
||||
): Promise<T> {
|
||||
throwIfAborted(signal)
|
||||
if (!signal) return await promise
|
||||
|
||||
let removeAbortListener = () => {}
|
||||
const abortPromise = new Promise<never>((_, reject) => {
|
||||
const onAbort = () => reject(createAbortError())
|
||||
signal.addEventListener('abort', onAbort, { once: true })
|
||||
removeAbortListener = () => signal.removeEventListener('abort', onAbort)
|
||||
})
|
||||
|
||||
try {
|
||||
return await Promise.race([promise, abortPromise])
|
||||
} finally {
|
||||
removeAbortListener()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates asset response data using Zod schema
|
||||
*/
|
||||
@@ -187,11 +244,43 @@ function validateAssetResponse(data: unknown): AssetResponse {
|
||||
)
|
||||
}
|
||||
|
||||
function validateUploadedAssetResponse(
|
||||
data: unknown
|
||||
): AssetItem & { created_new: boolean } {
|
||||
const result = uploadedAssetResponseSchema.safeParse(data)
|
||||
if (result.success) {
|
||||
return result.data
|
||||
}
|
||||
|
||||
console.error('Invalid asset upload response:', fromZodError(result.error))
|
||||
throw new Error(
|
||||
st(
|
||||
'assetBrowser.errorUploadFailed',
|
||||
'Failed to upload asset. Please try again.'
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Private service for asset-related network requests
|
||||
* Not exposed globally - used internally by ComfyApi
|
||||
*/
|
||||
function createAssetService() {
|
||||
let inputAssetsIncludingPublic: AssetItem[] | null = null
|
||||
let inputAssetsIncludingPublicRequestId = 0
|
||||
let pendingInputAssetsIncludingPublic: Promise<AssetItem[]> | null = null
|
||||
|
||||
/** Invalidates the cached public-inclusive input assets without aborting in-flight readers. */
|
||||
function invalidateInputAssetsIncludingPublic(): void {
|
||||
inputAssetsIncludingPublicRequestId++
|
||||
pendingInputAssetsIncludingPublic = null
|
||||
inputAssetsIncludingPublic = null
|
||||
}
|
||||
|
||||
function invalidateInputAssetsCacheIfNeeded(tags?: string[]): void {
|
||||
if (tags?.includes('input')) invalidateInputAssetsIncludingPublic()
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles API response with consistent error handling and Zod validation
|
||||
*/
|
||||
@@ -203,7 +292,8 @@ function createAssetService() {
|
||||
includeTags,
|
||||
limit = DEFAULT_LIMIT,
|
||||
offset,
|
||||
includePublic
|
||||
includePublic,
|
||||
signal
|
||||
} = options
|
||||
const queryParams = new URLSearchParams({
|
||||
include_tags: includeTags.join(','),
|
||||
@@ -217,7 +307,9 @@ function createAssetService() {
|
||||
}
|
||||
|
||||
const url = `${ASSETS_ENDPOINT}?${queryParams.toString()}`
|
||||
const res = await api.fetchApi(url)
|
||||
const res = signal
|
||||
? await api.fetchApi(url, { signal })
|
||||
: await api.fetchApi(url)
|
||||
if (!res.ok) {
|
||||
throw new Error(
|
||||
`${EXPERIMENTAL_WARNING}Unable to load ${context}: Server returned ${res.status}. Please try again.`
|
||||
@@ -403,15 +495,16 @@ function createAssetService() {
|
||||
* @param options - Pagination options
|
||||
* @param options.limit - Maximum number of assets to return (default: 500)
|
||||
* @param options.offset - Number of assets to skip (default: 0)
|
||||
* @param options.signal - Optional abort signal for cancelling the request
|
||||
* @returns Promise<AssetItem[]> - Full asset objects filtered by tag, excluding missing assets
|
||||
*/
|
||||
async function getAssetsByTag(
|
||||
tag: string,
|
||||
includePublic: boolean = true,
|
||||
{ limit = DEFAULT_LIMIT, offset = 0 }: PaginationOptions = {}
|
||||
{ limit = DEFAULT_LIMIT, offset = 0, signal }: AssetPaginationOptions = {}
|
||||
): Promise<AssetItem[]> {
|
||||
const data = await handleAssetRequest(
|
||||
{ includeTags: [tag], limit, offset, includePublic },
|
||||
{ includeTags: [tag], limit, offset, includePublic, signal },
|
||||
`assets for tag ${tag}`
|
||||
)
|
||||
|
||||
@@ -420,6 +513,116 @@ function createAssetService() {
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets every asset for a tag by walking paginated asset API responses.
|
||||
*
|
||||
* @param tag - The tag to filter by (e.g., 'models', 'input')
|
||||
* @param includePublic - Whether to include public assets (default: true)
|
||||
* @param options - Pagination options
|
||||
* @param options.limit - Page size for each request (default: 500)
|
||||
* @param options.signal - Optional abort signal for cancelling requests
|
||||
* @returns Promise<AssetItem[]> - Full asset objects filtered by tag
|
||||
*/
|
||||
async function getAllAssetsByTag(
|
||||
tag: string,
|
||||
includePublic: boolean = true,
|
||||
{ limit = DEFAULT_LIMIT, signal }: AssetPaginationOptions = {}
|
||||
): Promise<AssetItem[]> {
|
||||
const assets: AssetItem[] = []
|
||||
const pageSize = limit > 0 ? limit : DEFAULT_LIMIT
|
||||
let offset = 0
|
||||
|
||||
while (true) {
|
||||
if (signal?.aborted) throw createAbortError()
|
||||
|
||||
const data = await handleAssetRequest(
|
||||
{
|
||||
includeTags: [tag],
|
||||
limit: pageSize,
|
||||
offset,
|
||||
includePublic,
|
||||
signal
|
||||
},
|
||||
`assets for tag ${tag}`
|
||||
)
|
||||
const batch = data.assets ?? []
|
||||
assets.push(...batch.filter((asset) => !asset.tags.includes(MISSING_TAG)))
|
||||
|
||||
const noMoreFromServer = data.has_more === false
|
||||
const inferredLastPage =
|
||||
data.has_more === undefined && batch.length < pageSize
|
||||
if (batch.length === 0 || noMoreFromServer || inferredLastPage) {
|
||||
return assets
|
||||
}
|
||||
|
||||
offset += batch.length
|
||||
}
|
||||
}
|
||||
|
||||
function startInputAssetsIncludingPublicRequest(): Promise<AssetItem[]> {
|
||||
const requestId = ++inputAssetsIncludingPublicRequestId
|
||||
|
||||
pendingInputAssetsIncludingPublic = getAllAssetsByTag('input', true, {
|
||||
limit: INPUT_ASSETS_WITH_PUBLIC_LIMIT
|
||||
})
|
||||
.then((assets) => {
|
||||
if (requestId === inputAssetsIncludingPublicRequestId) {
|
||||
inputAssetsIncludingPublic = assets
|
||||
}
|
||||
return assets
|
||||
})
|
||||
.finally(() => {
|
||||
if (requestId === inputAssetsIncludingPublicRequestId) {
|
||||
pendingInputAssetsIncludingPublic = null
|
||||
}
|
||||
})
|
||||
|
||||
void pendingInputAssetsIncludingPublic.catch(() => {})
|
||||
return pendingInputAssetsIncludingPublic
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets cached input assets including public assets for missing media checks.
|
||||
* Caller aborts cancel only that caller; shared fetches are invalidated
|
||||
* through invalidateInputAssetsIncludingPublic().
|
||||
*/
|
||||
async function getInputAssetsIncludingPublic(
|
||||
signal?: AbortSignal
|
||||
): Promise<AssetItem[]> {
|
||||
throwIfAborted(signal)
|
||||
if (inputAssetsIncludingPublic) return inputAssetsIncludingPublic
|
||||
|
||||
const request =
|
||||
pendingInputAssetsIncludingPublic ??
|
||||
startInputAssetsIncludingPublicRequest()
|
||||
return await withCallerAbort(request, signal)
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks whether an asset exists for an exact asset hash.
|
||||
*
|
||||
* Uses the HEAD /assets/hash/{hash} endpoint and maps status-only responses:
|
||||
* 200 -> exists, 404 -> missing, and 400 -> invalid hash format.
|
||||
*/
|
||||
async function checkAssetHash(
|
||||
assetHash: string,
|
||||
signal?: AbortSignal
|
||||
): Promise<AssetHashStatus> {
|
||||
const response = await api.fetchApi(
|
||||
`${ASSETS_ENDPOINT}/hash/${encodeURIComponent(assetHash)}`,
|
||||
{
|
||||
method: 'HEAD',
|
||||
signal
|
||||
}
|
||||
)
|
||||
|
||||
if (response.status === 200) return 'exists'
|
||||
if (response.status === 404) return 'missing'
|
||||
if (response.status === 400) return 'invalid'
|
||||
|
||||
throw new Error(`Unexpected asset hash check status: ${response.status}`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes an asset by ID
|
||||
* Only available in cloud environment
|
||||
@@ -438,6 +641,8 @@ function createAssetService() {
|
||||
`Unable to delete asset ${id}: Server returned ${res.status}`
|
||||
)
|
||||
}
|
||||
|
||||
invalidateInputAssetsIncludingPublic()
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -545,7 +750,9 @@ function createAssetService() {
|
||||
)
|
||||
}
|
||||
|
||||
return await res.json()
|
||||
const asset = validateUploadedAssetResponse(await res.json())
|
||||
invalidateInputAssetsCacheIfNeeded(params.tags)
|
||||
return asset
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -598,7 +805,9 @@ function createAssetService() {
|
||||
)
|
||||
}
|
||||
|
||||
return await res.json()
|
||||
const asset = validateUploadedAssetResponse(await res.json())
|
||||
invalidateInputAssetsCacheIfNeeded(params.tags)
|
||||
return asset
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -628,6 +837,7 @@ function createAssetService() {
|
||||
if (!parseResult.success) {
|
||||
throw fromZodError(parseResult.error)
|
||||
}
|
||||
invalidateInputAssetsIncludingPublic()
|
||||
return parseResult.data
|
||||
}
|
||||
|
||||
@@ -658,6 +868,7 @@ function createAssetService() {
|
||||
if (!parseResult.success) {
|
||||
throw fromZodError(parseResult.error)
|
||||
}
|
||||
invalidateInputAssetsIncludingPublic()
|
||||
return parseResult.data
|
||||
}
|
||||
|
||||
@@ -709,6 +920,13 @@ function createAssetService() {
|
||||
)
|
||||
)
|
||||
}
|
||||
if (
|
||||
params.tags?.includes('input') &&
|
||||
result.data.type === 'async' &&
|
||||
result.data.task.status === 'completed'
|
||||
) {
|
||||
invalidateInputAssetsIncludingPublic()
|
||||
}
|
||||
return result.data
|
||||
}
|
||||
|
||||
@@ -724,6 +942,7 @@ function createAssetService() {
|
||||
)
|
||||
)
|
||||
}
|
||||
invalidateInputAssetsCacheIfNeeded(params.tags)
|
||||
return result.data
|
||||
}
|
||||
|
||||
@@ -764,6 +983,10 @@ function createAssetService() {
|
||||
getAssetsForNodeType,
|
||||
getAssetDetails,
|
||||
getAssetsByTag,
|
||||
getAllAssetsByTag,
|
||||
getInputAssetsIncludingPublic,
|
||||
invalidateInputAssetsIncludingPublic,
|
||||
checkAssetHash,
|
||||
deleteAsset,
|
||||
updateAsset,
|
||||
addAssetTags,
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
import { fromAny } from '@total-typescript/shoehorn'
|
||||
import { describe, expect, it, vi } from 'vitest'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import type { LGraph } from '@/lib/litegraph/src/LGraph'
|
||||
import type { LGraphNode } from '@/lib/litegraph/src/LGraphNode'
|
||||
import type { IComboWidget } from '@/lib/litegraph/src/types/widgets'
|
||||
import type { AssetItem } from '@/platform/assets/schemas/assetSchema'
|
||||
import type * as AssetServiceModule from '@/platform/assets/services/assetService'
|
||||
import {
|
||||
scanAllMediaCandidates,
|
||||
scanNodeMediaCandidates,
|
||||
@@ -13,6 +15,13 @@ import {
|
||||
} from './missingMediaScan'
|
||||
import type { MissingMediaCandidate } from './types'
|
||||
|
||||
const { mockCheckAssetHash, mockGetInputAssetsIncludingPublic } = vi.hoisted(
|
||||
() => ({
|
||||
mockCheckAssetHash: vi.fn(),
|
||||
mockGetInputAssetsIncludingPublic: vi.fn()
|
||||
})
|
||||
)
|
||||
|
||||
vi.mock('@/utils/graphTraversalUtil', () => ({
|
||||
collectAllNodes: (graph: { _testNodes: LGraphNode[] }) => graph._testNodes,
|
||||
getExecutionIdByNode: (
|
||||
@@ -21,6 +30,21 @@ vi.mock('@/utils/graphTraversalUtil', () => ({
|
||||
) => node._testExecutionId ?? String(node.id)
|
||||
}))
|
||||
|
||||
vi.mock('@/platform/assets/services/assetService', async () => {
|
||||
const actual = await vi.importActual<typeof AssetServiceModule>(
|
||||
'@/platform/assets/services/assetService'
|
||||
)
|
||||
|
||||
return {
|
||||
...actual,
|
||||
assetService: {
|
||||
...actual.assetService,
|
||||
checkAssetHash: mockCheckAssetHash,
|
||||
getInputAssetsIncludingPublic: mockGetInputAssetsIncludingPublic
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
function makeCandidate(
|
||||
nodeId: string,
|
||||
name: string,
|
||||
@@ -70,6 +94,16 @@ function makeGraph(nodes: LGraphNode[]): LGraph {
|
||||
return fromAny<LGraph, unknown>({ _testNodes: nodes })
|
||||
}
|
||||
|
||||
function makeAsset(name: string, assetHash: string | null = null): AssetItem {
|
||||
return {
|
||||
id: name,
|
||||
name,
|
||||
asset_hash: assetHash,
|
||||
mime_type: null,
|
||||
tags: ['input']
|
||||
}
|
||||
}
|
||||
|
||||
describe('scanNodeMediaCandidates', () => {
|
||||
it('returns candidate for a LoadImage node with missing image', () => {
|
||||
const graph = makeGraph([])
|
||||
@@ -232,37 +266,43 @@ describe('groupCandidatesByMediaType', () => {
|
||||
})
|
||||
|
||||
describe('verifyCloudMediaCandidates', () => {
|
||||
it('marks candidates missing when not in input assets', async () => {
|
||||
const existingHash =
|
||||
'blake3:1111111111111111111111111111111111111111111111111111111111111111'
|
||||
const missingHash =
|
||||
'blake3:2222222222222222222222222222222222222222222222222222222222222222'
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
mockCheckAssetHash.mockResolvedValue('missing')
|
||||
mockGetInputAssetsIncludingPublic.mockResolvedValue([])
|
||||
})
|
||||
|
||||
it('marks candidates missing when the asset hash is not found', async () => {
|
||||
const candidates = [
|
||||
makeCandidate('1', 'abc123.png', { isMissing: undefined }),
|
||||
makeCandidate('2', 'def456.png', { isMissing: undefined })
|
||||
makeCandidate('1', missingHash, { isMissing: undefined }),
|
||||
makeCandidate('2', existingHash, { isMissing: undefined })
|
||||
]
|
||||
|
||||
const mockStore = {
|
||||
updateInputs: async () => {},
|
||||
inputAssets: [{ asset_hash: 'def456.png', name: 'my-photo.png' }]
|
||||
}
|
||||
const checkAssetHash = vi.fn(async (assetHash: string) =>
|
||||
assetHash === existingHash ? ('exists' as const) : ('missing' as const)
|
||||
)
|
||||
|
||||
await verifyCloudMediaCandidates(candidates, undefined, mockStore)
|
||||
await verifyCloudMediaCandidates(candidates, undefined, checkAssetHash)
|
||||
|
||||
expect(candidates[0].isMissing).toBe(true)
|
||||
expect(candidates[1].isMissing).toBe(false)
|
||||
})
|
||||
|
||||
it('calls updateInputs before checking assets', async () => {
|
||||
let updateCalled = false
|
||||
const candidates = [makeCandidate('1', 'abc.png', { isMissing: undefined })]
|
||||
it('uses assetService.checkAssetHash by default', async () => {
|
||||
const candidates = [
|
||||
makeCandidate('1', existingHash, { isMissing: undefined })
|
||||
]
|
||||
mockCheckAssetHash.mockResolvedValue('exists')
|
||||
|
||||
const mockStore = {
|
||||
updateInputs: async () => {
|
||||
updateCalled = true
|
||||
},
|
||||
inputAssets: []
|
||||
}
|
||||
await verifyCloudMediaCandidates(candidates)
|
||||
|
||||
await verifyCloudMediaCandidates(candidates, undefined, mockStore)
|
||||
|
||||
expect(updateCalled).toBe(true)
|
||||
expect(candidates[0].isMissing).toBe(false)
|
||||
expect(mockCheckAssetHash).toHaveBeenCalledWith(existingHash, undefined)
|
||||
})
|
||||
|
||||
it('respects abort signal before execution', async () => {
|
||||
@@ -270,69 +310,221 @@ describe('verifyCloudMediaCandidates', () => {
|
||||
controller.abort()
|
||||
|
||||
const candidates = [
|
||||
makeCandidate('1', 'abc123.png', { isMissing: undefined })
|
||||
makeCandidate('1', missingHash, { isMissing: undefined })
|
||||
]
|
||||
|
||||
await verifyCloudMediaCandidates(candidates, controller.signal)
|
||||
|
||||
expect(candidates[0].isMissing).toBeUndefined()
|
||||
expect(mockCheckAssetHash).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('respects abort signal after updateInputs', async () => {
|
||||
it('respects abort signal after hash verification', async () => {
|
||||
const controller = new AbortController()
|
||||
const candidates = [makeCandidate('1', 'abc.png', { isMissing: undefined })]
|
||||
const candidates = [
|
||||
makeCandidate('1', existingHash, { isMissing: undefined })
|
||||
]
|
||||
const checkAssetHash = vi.fn(async () => {
|
||||
controller.abort()
|
||||
return 'exists' as const
|
||||
})
|
||||
|
||||
const mockStore = {
|
||||
updateInputs: async () => {
|
||||
controller.abort()
|
||||
},
|
||||
inputAssets: [{ asset_hash: 'abc.png', name: 'photo.png' }]
|
||||
}
|
||||
|
||||
await verifyCloudMediaCandidates(candidates, controller.signal, mockStore)
|
||||
await verifyCloudMediaCandidates(
|
||||
candidates,
|
||||
controller.signal,
|
||||
checkAssetHash
|
||||
)
|
||||
|
||||
expect(candidates[0].isMissing).toBeUndefined()
|
||||
})
|
||||
|
||||
it('skips candidates already resolved as true', async () => {
|
||||
const candidates = [makeCandidate('1', 'abc.png', { isMissing: true })]
|
||||
const candidates = [makeCandidate('1', missingHash, { isMissing: true })]
|
||||
|
||||
const mockStore = {
|
||||
updateInputs: async () => {},
|
||||
inputAssets: []
|
||||
}
|
||||
|
||||
await verifyCloudMediaCandidates(candidates, undefined, mockStore)
|
||||
await verifyCloudMediaCandidates(candidates)
|
||||
|
||||
expect(candidates[0].isMissing).toBe(true)
|
||||
expect(mockCheckAssetHash).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('skips candidates already resolved as false', async () => {
|
||||
const candidates = [makeCandidate('1', 'abc.png', { isMissing: false })]
|
||||
const candidates = [makeCandidate('1', existingHash, { isMissing: false })]
|
||||
|
||||
const mockStore = {
|
||||
updateInputs: async () => {},
|
||||
inputAssets: []
|
||||
}
|
||||
|
||||
await verifyCloudMediaCandidates(candidates, undefined, mockStore)
|
||||
await verifyCloudMediaCandidates(candidates)
|
||||
|
||||
expect(candidates[0].isMissing).toBe(false)
|
||||
expect(mockCheckAssetHash).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('skips entirely when no pending candidates', async () => {
|
||||
let updateCalled = false
|
||||
const candidates = [makeCandidate('1', 'abc.png', { isMissing: true })]
|
||||
const candidates = [makeCandidate('1', missingHash, { isMissing: true })]
|
||||
|
||||
const mockStore = {
|
||||
updateInputs: async () => {
|
||||
updateCalled = true
|
||||
},
|
||||
inputAssets: []
|
||||
}
|
||||
await verifyCloudMediaCandidates(candidates)
|
||||
|
||||
await verifyCloudMediaCandidates(candidates, undefined, mockStore)
|
||||
expect(mockCheckAssetHash).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
expect(updateCalled).toBe(false)
|
||||
it('falls back to input assets for non-blake3 candidate names', async () => {
|
||||
const candidates = [
|
||||
makeCandidate('1', 'photo.png', { isMissing: undefined }),
|
||||
makeCandidate('2', 'missing.png', { isMissing: undefined })
|
||||
]
|
||||
const fetchInputAssets = vi.fn(async () => [
|
||||
makeAsset('stored-photo.png', 'photo.png')
|
||||
])
|
||||
|
||||
await verifyCloudMediaCandidates(
|
||||
candidates,
|
||||
undefined,
|
||||
undefined,
|
||||
fetchInputAssets
|
||||
)
|
||||
|
||||
expect(mockCheckAssetHash).not.toHaveBeenCalled()
|
||||
expect(fetchInputAssets).toHaveBeenCalledOnce()
|
||||
expect(candidates[0].isMissing).toBe(false)
|
||||
expect(candidates[1].isMissing).toBe(true)
|
||||
})
|
||||
|
||||
it('uses public input assets for default legacy fallback', async () => {
|
||||
const candidates = [
|
||||
makeCandidate('1', 'public-photo.png', { isMissing: undefined })
|
||||
]
|
||||
const inputAssets = Array.from({ length: 500 }, (_, index) =>
|
||||
makeAsset(`asset-${index}.png`)
|
||||
)
|
||||
inputAssets[42] = makeAsset('public-asset-record', 'public-photo.png')
|
||||
mockGetInputAssetsIncludingPublic.mockResolvedValue(inputAssets)
|
||||
|
||||
await verifyCloudMediaCandidates(candidates)
|
||||
|
||||
expect(mockGetInputAssetsIncludingPublic).toHaveBeenCalledWith(undefined)
|
||||
expect(candidates[0].isMissing).toBe(false)
|
||||
})
|
||||
|
||||
it('silences aborts while loading legacy fallback input assets', async () => {
|
||||
const abortError = new Error('aborted')
|
||||
abortError.name = 'AbortError'
|
||||
const controller = new AbortController()
|
||||
const candidates = [
|
||||
makeCandidate('1', 'photo.png', { isMissing: undefined })
|
||||
]
|
||||
const fetchInputAssets = vi.fn(async () => {
|
||||
controller.abort()
|
||||
throw abortError
|
||||
})
|
||||
|
||||
await expect(
|
||||
verifyCloudMediaCandidates(
|
||||
candidates,
|
||||
controller.signal,
|
||||
undefined,
|
||||
fetchInputAssets
|
||||
)
|
||||
).resolves.toBeUndefined()
|
||||
|
||||
expect(candidates[0].isMissing).toBeUndefined()
|
||||
})
|
||||
|
||||
it('silences aborts from the default legacy fallback input asset store path', async () => {
|
||||
const abortError = new Error('aborted')
|
||||
abortError.name = 'AbortError'
|
||||
const controller = new AbortController()
|
||||
const candidates = [
|
||||
makeCandidate('1', 'photo.png', { isMissing: undefined })
|
||||
]
|
||||
mockGetInputAssetsIncludingPublic.mockImplementationOnce(async () => {
|
||||
controller.abort()
|
||||
throw abortError
|
||||
})
|
||||
|
||||
await expect(
|
||||
verifyCloudMediaCandidates(candidates, controller.signal)
|
||||
).resolves.toBeUndefined()
|
||||
|
||||
expect(mockGetInputAssetsIncludingPublic).toHaveBeenCalledWith(
|
||||
controller.signal
|
||||
)
|
||||
expect(candidates[0].isMissing).toBeUndefined()
|
||||
})
|
||||
|
||||
it('falls back to input assets when the hash endpoint returns 400', async () => {
|
||||
const candidates = [
|
||||
makeCandidate('1', existingHash, { isMissing: undefined })
|
||||
]
|
||||
mockCheckAssetHash.mockResolvedValue('invalid')
|
||||
const fetchInputAssets = vi.fn(async () => [
|
||||
makeAsset('photo.png', existingHash)
|
||||
])
|
||||
|
||||
await verifyCloudMediaCandidates(
|
||||
candidates,
|
||||
undefined,
|
||||
undefined,
|
||||
fetchInputAssets
|
||||
)
|
||||
|
||||
expect(mockCheckAssetHash).toHaveBeenCalledWith(existingHash, undefined)
|
||||
expect(fetchInputAssets).toHaveBeenCalledOnce()
|
||||
expect(candidates[0].isMissing).toBe(false)
|
||||
})
|
||||
|
||||
it('falls back to input assets when hash verification fails', async () => {
|
||||
const warn = vi.spyOn(console, 'warn').mockImplementation(() => {})
|
||||
const candidates = [
|
||||
makeCandidate('1', existingHash, { isMissing: undefined })
|
||||
]
|
||||
const checkAssetHash = vi.fn(async () => {
|
||||
throw new Error('network failed')
|
||||
})
|
||||
const fetchInputAssets = vi.fn(async () => [
|
||||
makeAsset('photo.png', existingHash)
|
||||
])
|
||||
|
||||
await verifyCloudMediaCandidates(
|
||||
candidates,
|
||||
undefined,
|
||||
checkAssetHash,
|
||||
fetchInputAssets
|
||||
)
|
||||
|
||||
expect(fetchInputAssets).toHaveBeenCalledOnce()
|
||||
expect(candidates[0].isMissing).toBe(false)
|
||||
expect(warn).toHaveBeenCalledOnce()
|
||||
warn.mockRestore()
|
||||
})
|
||||
|
||||
it('does not call the hash endpoint for malformed blake3-looking values', async () => {
|
||||
const malformedHash = 'blake3:abc'
|
||||
const candidates = [
|
||||
makeCandidate('1', malformedHash, { isMissing: undefined })
|
||||
]
|
||||
const fetchInputAssets = vi.fn(async () => [
|
||||
makeAsset('legacy.png', malformedHash)
|
||||
])
|
||||
|
||||
await verifyCloudMediaCandidates(
|
||||
candidates,
|
||||
undefined,
|
||||
undefined,
|
||||
fetchInputAssets
|
||||
)
|
||||
|
||||
expect(mockCheckAssetHash).not.toHaveBeenCalled()
|
||||
expect(fetchInputAssets).toHaveBeenCalledOnce()
|
||||
expect(candidates[0].isMissing).toBe(false)
|
||||
})
|
||||
|
||||
it('deduplicates checks for repeated candidate names', async () => {
|
||||
const candidates = [
|
||||
makeCandidate('1', missingHash, { isMissing: undefined }),
|
||||
makeCandidate('2', missingHash, { isMissing: undefined })
|
||||
]
|
||||
|
||||
await verifyCloudMediaCandidates(candidates)
|
||||
|
||||
expect(mockCheckAssetHash).toHaveBeenCalledOnce()
|
||||
expect(candidates[0].isMissing).toBe(true)
|
||||
expect(candidates[1].isMissing).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -18,6 +18,12 @@ import {
|
||||
} from '@/utils/graphTraversalUtil'
|
||||
import { LGraphEventMode } from '@/lib/litegraph/src/types/globalEnums'
|
||||
import { resolveComboValues } from '@/utils/litegraphUtil'
|
||||
import type { AssetItem } from '@/platform/assets/schemas/assetSchema'
|
||||
import type { AssetHashStatus } from '@/platform/assets/services/assetService'
|
||||
import {
|
||||
assetService,
|
||||
isBlake3AssetHash
|
||||
} from '@/platform/assets/services/assetService'
|
||||
|
||||
/** Map of node types to their media widget name and media type. */
|
||||
const MEDIA_NODE_WIDGETS: Record<
|
||||
@@ -106,41 +112,130 @@ export function scanNodeMediaCandidates(
|
||||
return candidates
|
||||
}
|
||||
|
||||
interface InputVerifier {
|
||||
updateInputs: () => Promise<unknown>
|
||||
inputAssets: Array<{ asset_hash?: string | null; name: string }>
|
||||
type AssetHashVerifier = (
|
||||
assetHash: string,
|
||||
signal?: AbortSignal
|
||||
) => Promise<AssetHashStatus>
|
||||
|
||||
type InputAssetFetcher = (signal?: AbortSignal) => Promise<AssetItem[]>
|
||||
|
||||
function groupCandidatesForHashLookup(candidates: MissingMediaCandidate[]): {
|
||||
candidatesByHash: Map<string, MissingMediaCandidate[]>
|
||||
legacyCandidates: MissingMediaCandidate[]
|
||||
} {
|
||||
const candidatesByHash = new Map<string, MissingMediaCandidate[]>()
|
||||
const legacyCandidates: MissingMediaCandidate[] = []
|
||||
|
||||
for (const candidate of candidates) {
|
||||
if (!isBlake3AssetHash(candidate.name)) {
|
||||
legacyCandidates.push(candidate)
|
||||
continue
|
||||
}
|
||||
|
||||
const hashCandidates = candidatesByHash.get(candidate.name)
|
||||
if (hashCandidates) hashCandidates.push(candidate)
|
||||
else candidatesByHash.set(candidate.name, [candidate])
|
||||
}
|
||||
|
||||
return { candidatesByHash, legacyCandidates }
|
||||
}
|
||||
|
||||
async function verifyCandidatesByHash(
|
||||
candidatesByHash: Map<string, MissingMediaCandidate[]>,
|
||||
legacyCandidates: MissingMediaCandidate[],
|
||||
signal: AbortSignal | undefined,
|
||||
checkAssetHash: AssetHashVerifier
|
||||
): Promise<void> {
|
||||
await Promise.all(
|
||||
Array.from(candidatesByHash, async ([assetHash, hashCandidates]) => {
|
||||
if (signal?.aborted) return
|
||||
|
||||
let status: AssetHashStatus
|
||||
try {
|
||||
status = await checkAssetHash(assetHash, signal)
|
||||
if (signal?.aborted) return
|
||||
} catch (err) {
|
||||
if (signal?.aborted || isAbortError(err)) return
|
||||
console.warn(
|
||||
'[Missing Media Pipeline] Failed to verify asset hash:',
|
||||
err
|
||||
)
|
||||
legacyCandidates.push(...hashCandidates)
|
||||
return
|
||||
}
|
||||
|
||||
if (status === 'invalid') {
|
||||
legacyCandidates.push(...hashCandidates)
|
||||
return
|
||||
}
|
||||
|
||||
for (const candidate of hashCandidates) {
|
||||
candidate.isMissing = status === 'missing'
|
||||
}
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify cloud media candidates against the input assets fetched from the
|
||||
* assets store. Mutates candidates' `isMissing` in place.
|
||||
* Verify cloud media candidates by probing the asset hash endpoint first.
|
||||
* Invalid hash values fall back to the legacy input asset list check.
|
||||
*/
|
||||
export async function verifyCloudMediaCandidates(
|
||||
candidates: MissingMediaCandidate[],
|
||||
signal?: AbortSignal,
|
||||
assetsStore?: InputVerifier
|
||||
checkAssetHash: AssetHashVerifier = assetService.checkAssetHash,
|
||||
fetchInputAssets: InputAssetFetcher = fetchMissingInputAssets
|
||||
): Promise<void> {
|
||||
if (signal?.aborted) return
|
||||
|
||||
const pending = candidates.filter((c) => c.isMissing === undefined)
|
||||
if (pending.length === 0) return
|
||||
|
||||
const store =
|
||||
assetsStore ?? (await import('@/stores/assetsStore')).useAssetsStore()
|
||||
const { candidatesByHash, legacyCandidates } =
|
||||
groupCandidatesForHashLookup(pending)
|
||||
await verifyCandidatesByHash(
|
||||
candidatesByHash,
|
||||
legacyCandidates,
|
||||
signal,
|
||||
checkAssetHash
|
||||
)
|
||||
|
||||
await store.updateInputs()
|
||||
if (signal?.aborted || legacyCandidates.length === 0) return
|
||||
|
||||
let inputAssets: AssetItem[]
|
||||
try {
|
||||
inputAssets = await fetchInputAssets(signal)
|
||||
} catch (err) {
|
||||
if (signal?.aborted || isAbortError(err)) return
|
||||
throw err
|
||||
}
|
||||
|
||||
if (signal?.aborted) return
|
||||
|
||||
const assetHashes = new Set(
|
||||
store.inputAssets.map((a) => a.asset_hash).filter((h): h is string => !!h)
|
||||
inputAssets.map((a) => a.asset_hash).filter((h): h is string => !!h)
|
||||
)
|
||||
|
||||
for (const c of pending) {
|
||||
c.isMissing = !assetHashes.has(c.name)
|
||||
for (const candidate of legacyCandidates) {
|
||||
candidate.isMissing = !assetHashes.has(candidate.name)
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchMissingInputAssets(
|
||||
signal?: AbortSignal
|
||||
): Promise<AssetItem[]> {
|
||||
return await assetService.getInputAssetsIncludingPublic(signal)
|
||||
}
|
||||
|
||||
function isAbortError(err: unknown): boolean {
|
||||
return (
|
||||
typeof err === 'object' &&
|
||||
err !== null &&
|
||||
'name' in err &&
|
||||
err.name === 'AbortError'
|
||||
)
|
||||
}
|
||||
|
||||
/** Group confirmed-missing candidates by file name into view models. */
|
||||
export function groupCandidatesByName(
|
||||
candidates: MissingMediaCandidate[]
|
||||
|
||||
@@ -19,6 +19,11 @@ import activeSubgraphUnmatchedModel from '@/platform/missingModel/__fixtures__/a
|
||||
import bypassedSubgraphUnmatchedModel from '@/platform/missingModel/__fixtures__/bypassedSubgraphUnmatchedModel.json' with { type: 'json' }
|
||||
import type { MissingModelCandidate } from '@/platform/missingModel/types'
|
||||
import type { ComfyWorkflowJSON } from '@/platform/workflow/validation/schemas/workflowSchema'
|
||||
import type * as AssetServiceModule from '@/platform/assets/services/assetService'
|
||||
|
||||
const { mockCheckAssetHash } = vi.hoisted(() => ({
|
||||
mockCheckAssetHash: vi.fn()
|
||||
}))
|
||||
|
||||
vi.mock('@/utils/graphTraversalUtil', () => ({
|
||||
collectAllNodes: (graph: { _testNodes: LGraphNode[] }) => graph._testNodes,
|
||||
@@ -28,6 +33,20 @@ vi.mock('@/utils/graphTraversalUtil', () => ({
|
||||
) => node._testExecutionId ?? String(node.id)
|
||||
}))
|
||||
|
||||
vi.mock('@/platform/assets/services/assetService', async () => {
|
||||
const actual = await vi.importActual<typeof AssetServiceModule>(
|
||||
'@/platform/assets/services/assetService'
|
||||
)
|
||||
|
||||
return {
|
||||
...actual,
|
||||
assetService: {
|
||||
...actual.assetService,
|
||||
checkAssetHash: mockCheckAssetHash
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
/** Helper: create a combo widget mock */
|
||||
function makeComboWidget(
|
||||
name: string,
|
||||
@@ -43,7 +62,7 @@ function makeComboWidget(
|
||||
}
|
||||
|
||||
/** Helper: create an asset widget mock (Cloud combo replacement) */
|
||||
function makeAssetWidget(name: string, value: string): IBaseWidget {
|
||||
function makeAssetWidget(name: string, value: unknown): IBaseWidget {
|
||||
return fromAny<IBaseWidget, unknown>({
|
||||
type: 'asset',
|
||||
name,
|
||||
@@ -551,6 +570,16 @@ describe('scanAllModelCandidates', () => {
|
||||
expect(result).toEqual([])
|
||||
})
|
||||
|
||||
it('should skip asset widgets with non-string values', () => {
|
||||
const graph = makeGraph([
|
||||
makeNode(1, 'SomeNode', [makeAssetWidget('ckpt_name', 123)])
|
||||
])
|
||||
|
||||
const result = scanAllModelCandidates(graph, noAssetSupport)
|
||||
|
||||
expect(result).toEqual([])
|
||||
})
|
||||
|
||||
it('should scan both combo and asset widgets on the same node', () => {
|
||||
const graph = makeGraph([
|
||||
makeNode(1, 'DualLoaderNode', [
|
||||
@@ -1411,6 +1440,7 @@ function makeAssetCandidate(
|
||||
describe('verifyAssetSupportedCandidates', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
mockCheckAssetHash.mockResolvedValue('missing')
|
||||
mockIsModelLoading.mockReturnValue(false)
|
||||
mockHasMore.mockReturnValue(false)
|
||||
mockGetAssets.mockReturnValue([])
|
||||
@@ -1428,6 +1458,125 @@ describe('verifyAssetSupportedCandidates', () => {
|
||||
)
|
||||
})
|
||||
|
||||
it('should resolve isMissing=false when the blake3 hash endpoint finds the asset', async () => {
|
||||
const hash =
|
||||
'1111111111111111111111111111111111111111111111111111111111111111'
|
||||
const candidates = [
|
||||
makeAssetCandidate('model.safetensors', {
|
||||
hash,
|
||||
hashType: 'blake3'
|
||||
})
|
||||
]
|
||||
mockCheckAssetHash.mockResolvedValue('exists')
|
||||
|
||||
await verifyAssetSupportedCandidates(candidates)
|
||||
|
||||
expect(candidates[0].isMissing).toBe(false)
|
||||
expect(mockCheckAssetHash).toHaveBeenCalledWith(`blake3:${hash}`, undefined)
|
||||
expect(mockUpdateModelsForNodeType).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should fall back to asset store matching when the blake3 hash is not found', async () => {
|
||||
const hash =
|
||||
'2222222222222222222222222222222222222222222222222222222222222222'
|
||||
const candidates = [
|
||||
makeAssetCandidate('my_model.safetensors', {
|
||||
hash,
|
||||
hashType: 'blake3'
|
||||
})
|
||||
]
|
||||
mockCheckAssetHash.mockResolvedValue('missing')
|
||||
mockGetAssets.mockReturnValue([
|
||||
{
|
||||
id: '1',
|
||||
name: 'my_model.safetensors',
|
||||
asset_hash: null,
|
||||
metadata: { filename: 'my_model.safetensors' }
|
||||
}
|
||||
])
|
||||
|
||||
await verifyAssetSupportedCandidates(candidates)
|
||||
|
||||
expect(candidates[0].isMissing).toBe(false)
|
||||
expect(mockUpdateModelsForNodeType).toHaveBeenCalledWith(
|
||||
'CheckpointLoaderSimple'
|
||||
)
|
||||
})
|
||||
|
||||
it('should fall back to asset store matching when hash verification fails', async () => {
|
||||
const warn = vi.spyOn(console, 'warn').mockImplementation(() => {})
|
||||
const hash =
|
||||
'3333333333333333333333333333333333333333333333333333333333333333'
|
||||
const candidates = [
|
||||
makeAssetCandidate('my_model.safetensors', {
|
||||
hash,
|
||||
hashType: 'blake3'
|
||||
})
|
||||
]
|
||||
mockCheckAssetHash.mockRejectedValue(new Error('network failed'))
|
||||
mockGetAssets.mockReturnValue([
|
||||
{
|
||||
id: '1',
|
||||
name: 'my_model.safetensors',
|
||||
asset_hash: null,
|
||||
metadata: { filename: 'my_model.safetensors' }
|
||||
}
|
||||
])
|
||||
|
||||
await verifyAssetSupportedCandidates(candidates)
|
||||
|
||||
expect(candidates[0].isMissing).toBe(false)
|
||||
expect(mockUpdateModelsForNodeType).toHaveBeenCalledWith(
|
||||
'CheckpointLoaderSimple'
|
||||
)
|
||||
expect(warn).toHaveBeenCalledOnce()
|
||||
warn.mockRestore()
|
||||
})
|
||||
|
||||
it('should skip malformed blake3 hashes and use asset store matching', async () => {
|
||||
const candidates = [
|
||||
makeAssetCandidate('my_model.safetensors', {
|
||||
hash: 'abc123',
|
||||
hashType: 'blake3'
|
||||
})
|
||||
]
|
||||
mockGetAssets.mockReturnValue([
|
||||
{
|
||||
id: '1',
|
||||
name: 'my_model.safetensors',
|
||||
asset_hash: null,
|
||||
metadata: { filename: 'my_model.safetensors' }
|
||||
}
|
||||
])
|
||||
|
||||
await verifyAssetSupportedCandidates(candidates)
|
||||
|
||||
expect(mockCheckAssetHash).not.toHaveBeenCalled()
|
||||
expect(candidates[0].isMissing).toBe(false)
|
||||
})
|
||||
|
||||
it('should not warn or fall back when hash verification is aborted', async () => {
|
||||
const warn = vi.spyOn(console, 'warn').mockImplementation(() => {})
|
||||
const abortError = new Error('aborted')
|
||||
abortError.name = 'AbortError'
|
||||
const hash =
|
||||
'4444444444444444444444444444444444444444444444444444444444444444'
|
||||
const candidates = [
|
||||
makeAssetCandidate('my_model.safetensors', {
|
||||
hash,
|
||||
hashType: 'blake3'
|
||||
})
|
||||
]
|
||||
mockCheckAssetHash.mockRejectedValue(abortError)
|
||||
|
||||
await verifyAssetSupportedCandidates(candidates)
|
||||
|
||||
expect(candidates[0].isMissing).toBeUndefined()
|
||||
expect(mockUpdateModelsForNodeType).not.toHaveBeenCalled()
|
||||
expect(warn).not.toHaveBeenCalled()
|
||||
warn.mockRestore()
|
||||
})
|
||||
|
||||
it('should resolve isMissing=false when asset with matching hash exists', async () => {
|
||||
const candidates = [
|
||||
makeAssetCandidate('model.safetensors', {
|
||||
@@ -1442,6 +1591,7 @@ describe('verifyAssetSupportedCandidates', () => {
|
||||
await verifyAssetSupportedCandidates(candidates)
|
||||
|
||||
expect(candidates[0].isMissing).toBe(false)
|
||||
expect(mockCheckAssetHash).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should resolve isMissing=false when asset with matching filename exists', async () => {
|
||||
|
||||
@@ -24,6 +24,11 @@ import {
|
||||
} from '@/utils/graphTraversalUtil'
|
||||
import { LGraphEventMode } from '@/lib/litegraph/src/types/globalEnums'
|
||||
import { resolveComboValues } from '@/utils/litegraphUtil'
|
||||
import type { AssetHashStatus } from '@/platform/assets/services/assetService'
|
||||
import {
|
||||
assetService,
|
||||
toBlake3AssetHash
|
||||
} from '@/platform/assets/services/assetService'
|
||||
|
||||
export type MissingModelWorkflowData = FlattenableWorkflowGraph & {
|
||||
models?: ModelFile[]
|
||||
@@ -177,7 +182,7 @@ function scanAssetWidget(
|
||||
getDirectory: ((nodeType: string) => string | undefined) | undefined
|
||||
): MissingModelCandidate | null {
|
||||
const value = widget.value
|
||||
if (!value.trim()) return null
|
||||
if (typeof value !== 'string' || !value.trim()) return null
|
||||
if (!isModelFileName(value)) return null
|
||||
|
||||
return {
|
||||
@@ -445,20 +450,68 @@ interface AssetVerifier {
|
||||
getAssets: (nodeType: string) => AssetItem[] | undefined
|
||||
}
|
||||
|
||||
type AssetHashVerifier = (
|
||||
assetHash: string,
|
||||
signal?: AbortSignal
|
||||
) => Promise<AssetHashStatus>
|
||||
|
||||
export async function verifyAssetSupportedCandidates(
|
||||
candidates: MissingModelCandidate[],
|
||||
signal?: AbortSignal,
|
||||
assetsStore?: AssetVerifier
|
||||
assetsStore?: AssetVerifier,
|
||||
checkAssetHash: AssetHashVerifier = assetService.checkAssetHash
|
||||
): Promise<void> {
|
||||
if (signal?.aborted) return
|
||||
|
||||
const pendingCandidates = candidates.filter(
|
||||
(c) => c.isAssetSupported && c.isMissing === undefined
|
||||
)
|
||||
if (pendingCandidates.length === 0) return
|
||||
|
||||
const pendingNodeTypes = new Set<string>()
|
||||
for (const c of candidates) {
|
||||
if (c.isAssetSupported && c.isMissing === undefined) {
|
||||
pendingNodeTypes.add(c.nodeType)
|
||||
const candidatesByHash = new Map<string, MissingModelCandidate[]>()
|
||||
|
||||
for (const candidate of pendingCandidates) {
|
||||
const assetHash = getBlake3AssetHash(candidate)
|
||||
if (!assetHash) {
|
||||
pendingNodeTypes.add(candidate.nodeType)
|
||||
continue
|
||||
}
|
||||
|
||||
const hashCandidates = candidatesByHash.get(assetHash)
|
||||
if (hashCandidates) hashCandidates.push(candidate)
|
||||
else candidatesByHash.set(assetHash, [candidate])
|
||||
}
|
||||
|
||||
await Promise.all(
|
||||
Array.from(candidatesByHash, async ([assetHash, hashCandidates]) => {
|
||||
if (signal?.aborted) return
|
||||
|
||||
try {
|
||||
const status = await checkAssetHash(assetHash, signal)
|
||||
if (signal?.aborted) return
|
||||
|
||||
if (status === 'exists') {
|
||||
for (const candidate of hashCandidates) {
|
||||
candidate.isMissing = false
|
||||
}
|
||||
return
|
||||
}
|
||||
} catch (err) {
|
||||
if (signal?.aborted || isAbortError(err)) return
|
||||
console.warn(
|
||||
'[Missing Model Pipeline] Failed to verify asset hash:',
|
||||
err
|
||||
)
|
||||
}
|
||||
|
||||
for (const candidate of hashCandidates) {
|
||||
pendingNodeTypes.add(candidate.nodeType)
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
if (signal?.aborted) return
|
||||
if (pendingNodeTypes.size === 0) return
|
||||
|
||||
const store =
|
||||
@@ -491,6 +544,20 @@ export async function verifyAssetSupportedCandidates(
|
||||
}
|
||||
}
|
||||
|
||||
function getBlake3AssetHash(candidate: MissingModelCandidate): string | null {
|
||||
if (candidate.hashType?.toLowerCase() !== 'blake3') return null
|
||||
return toBlake3AssetHash(candidate.hash)
|
||||
}
|
||||
|
||||
function isAbortError(err: unknown): boolean {
|
||||
return (
|
||||
typeof err === 'object' &&
|
||||
err !== null &&
|
||||
'name' in err &&
|
||||
err.name === 'AbortError'
|
||||
)
|
||||
}
|
||||
|
||||
function normalizePath(path: string): string {
|
||||
return path.replace(/\\/g, '/')
|
||||
}
|
||||
|
||||
@@ -11,6 +11,12 @@ export const FEATURE_SURVEYS: Record<string, FeatureSurveyConfig> = {
|
||||
triggerThreshold: 3,
|
||||
delayMs: 5000
|
||||
},
|
||||
'queue-progress-overlay': {
|
||||
featureId: 'queue-progress-overlay',
|
||||
typeformId: 'HZ5saxry',
|
||||
triggerThreshold: 16,
|
||||
delayMs: 5000
|
||||
},
|
||||
'error-panel': {
|
||||
featureId: 'error-panel',
|
||||
typeformId: 'iFp4p4mV',
|
||||
|
||||
@@ -0,0 +1,198 @@
|
||||
import { createTestingPinia } from '@pinia/testing'
|
||||
import { render, screen } from '@testing-library/vue'
|
||||
import userEvent from '@testing-library/user-event'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { ref } from 'vue'
|
||||
import { createI18n } from 'vue-i18n'
|
||||
|
||||
import type { SubscriptionDialogReason } from '@/platform/cloud/subscription/composables/useSubscriptionDialog'
|
||||
|
||||
import SubscriptionRequiredDialogContentWorkspace from './SubscriptionRequiredDialogContentWorkspace.vue'
|
||||
|
||||
const mockHandleSubscribeClick = vi.fn()
|
||||
const mockHandleBackToPricing = vi.fn()
|
||||
const mockHandleAddCreditCard = vi.fn()
|
||||
const mockHandleConfirmTransition = vi.fn()
|
||||
const mockHandleResubscribe = vi.fn()
|
||||
const mockCheckoutStep = ref<'pricing' | 'preview'>('pricing')
|
||||
const mockPreviewData = ref<{ transition_type: string } | null>(null)
|
||||
|
||||
vi.mock('@/platform/workspace/composables/useSubscriptionCheckout', () => ({
|
||||
useSubscriptionCheckout: () => ({
|
||||
checkoutStep: mockCheckoutStep,
|
||||
isLoadingPreview: ref(false),
|
||||
loadingTier: ref(null),
|
||||
isSubscribing: ref(false),
|
||||
isResubscribing: ref(false),
|
||||
previewData: mockPreviewData,
|
||||
selectedTierKey: ref('standard'),
|
||||
selectedBillingCycle: ref('yearly'),
|
||||
isPolling: ref(false),
|
||||
handleSubscribeClick: mockHandleSubscribeClick,
|
||||
handleBackToPricing: mockHandleBackToPricing,
|
||||
handleAddCreditCard: mockHandleAddCreditCard,
|
||||
handleConfirmTransition: mockHandleConfirmTransition,
|
||||
handleResubscribe: mockHandleResubscribe
|
||||
})
|
||||
}))
|
||||
|
||||
const i18n = createI18n({
|
||||
legacy: false,
|
||||
locale: 'en',
|
||||
messages: {
|
||||
en: {
|
||||
g: { back: 'Back', close: 'Close' },
|
||||
subscription: {
|
||||
plansForWorkspace: 'Plans for {workspace}',
|
||||
teamWorkspace: 'Team'
|
||||
},
|
||||
credits: {
|
||||
topUp: {
|
||||
insufficientTitle: 'Insufficient Credits',
|
||||
insufficientMessage: 'You have run out of credits.'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const PricingTableStub = {
|
||||
name: 'PricingTableWorkspace',
|
||||
template: `<div data-testid="pricing-table">
|
||||
<button data-testid="subscribe-btn" @click="$emit('subscribe', { tierKey: 'standard', billingCycle: 'yearly' })">Subscribe</button>
|
||||
<button data-testid="resubscribe-btn" @click="$emit('resubscribe')">Resubscribe</button>
|
||||
</div>`
|
||||
}
|
||||
|
||||
const AddPaymentPreviewStub = {
|
||||
name: 'SubscriptionAddPaymentPreviewWorkspace',
|
||||
template: `<div data-testid="add-payment-preview">
|
||||
<button data-testid="add-card-btn" @click="$emit('addCreditCard')">Add Card</button>
|
||||
</div>`
|
||||
}
|
||||
|
||||
const TransitionPreviewStub = {
|
||||
name: 'SubscriptionTransitionPreviewWorkspace',
|
||||
template: `<div data-testid="transition-preview">
|
||||
<button data-testid="confirm-btn" @click="$emit('confirm')">Confirm</button>
|
||||
</div>`
|
||||
}
|
||||
|
||||
function renderComponent(
|
||||
props: { onClose?: () => void; reason?: SubscriptionDialogReason } = {}
|
||||
) {
|
||||
return render(SubscriptionRequiredDialogContentWorkspace, {
|
||||
props: {
|
||||
onClose: props.onClose ?? vi.fn(),
|
||||
...(props.reason ? { reason: props.reason } : {})
|
||||
},
|
||||
global: {
|
||||
plugins: [
|
||||
createTestingPinia({ createSpy: vi.fn, stubActions: false }),
|
||||
i18n
|
||||
],
|
||||
stubs: {
|
||||
PricingTableWorkspace: PricingTableStub,
|
||||
SubscriptionAddPaymentPreviewWorkspace: AddPaymentPreviewStub,
|
||||
SubscriptionTransitionPreviewWorkspace: TransitionPreviewStub
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
describe('SubscriptionRequiredDialogContentWorkspace', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
mockCheckoutStep.value = 'pricing'
|
||||
mockPreviewData.value = null
|
||||
})
|
||||
|
||||
it('shows pricing table on pricing step', () => {
|
||||
renderComponent()
|
||||
expect(screen.getByTestId('pricing-table')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('add-payment-preview')).not.toBeInTheDocument()
|
||||
expect(screen.queryByTestId('transition-preview')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('shows close button and hides back button on pricing step', () => {
|
||||
renderComponent()
|
||||
expect(screen.getByLabelText('Close')).toBeInTheDocument()
|
||||
expect(screen.queryByLabelText('Back')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('calls onClose when close button is clicked', async () => {
|
||||
const user = userEvent.setup()
|
||||
const onClose = vi.fn()
|
||||
renderComponent({ onClose })
|
||||
|
||||
await user.click(screen.getByLabelText('Close'))
|
||||
|
||||
expect(onClose).toHaveBeenCalledOnce()
|
||||
})
|
||||
|
||||
it('shows back button on preview step', () => {
|
||||
mockCheckoutStep.value = 'preview'
|
||||
mockPreviewData.value = { transition_type: 'new_subscription' }
|
||||
renderComponent()
|
||||
expect(screen.getByLabelText('Back')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('shows insufficient credits message when reason is out_of_credits', () => {
|
||||
renderComponent({ reason: 'out_of_credits' })
|
||||
expect(screen.getByText('Insufficient Credits')).toBeInTheDocument()
|
||||
expect(screen.getByText('You have run out of credits.')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('does not show insufficient credits message without reason', () => {
|
||||
renderComponent()
|
||||
expect(screen.queryByText('Insufficient Credits')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('shows new subscription preview when transition_type is new_subscription', () => {
|
||||
mockCheckoutStep.value = 'preview'
|
||||
mockPreviewData.value = { transition_type: 'new_subscription' }
|
||||
renderComponent()
|
||||
expect(screen.getByTestId('add-payment-preview')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('transition-preview')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('shows transition preview when transition_type is upgrade', () => {
|
||||
mockCheckoutStep.value = 'preview'
|
||||
mockPreviewData.value = { transition_type: 'upgrade' }
|
||||
renderComponent()
|
||||
expect(screen.getByTestId('transition-preview')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('add-payment-preview')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('wires subscribe event to handleSubscribeClick', async () => {
|
||||
const user = userEvent.setup()
|
||||
renderComponent()
|
||||
|
||||
await user.click(screen.getByTestId('subscribe-btn'))
|
||||
|
||||
expect(mockHandleSubscribeClick).toHaveBeenCalledWith({
|
||||
tierKey: 'standard',
|
||||
billingCycle: 'yearly'
|
||||
})
|
||||
})
|
||||
|
||||
it('wires resubscribe event to handleResubscribe', async () => {
|
||||
const user = userEvent.setup()
|
||||
renderComponent()
|
||||
|
||||
await user.click(screen.getByTestId('resubscribe-btn'))
|
||||
|
||||
expect(mockHandleResubscribe).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('wires back button to handleBackToPricing', async () => {
|
||||
const user = userEvent.setup()
|
||||
mockCheckoutStep.value = 'preview'
|
||||
mockPreviewData.value = { transition_type: 'new_subscription' }
|
||||
renderComponent()
|
||||
|
||||
await user.click(screen.getByLabelText('Back'))
|
||||
|
||||
expect(mockHandleBackToPricing).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
@@ -18,7 +18,7 @@
|
||||
variant="muted-textonly"
|
||||
class="absolute top-2.5 right-2.5 shrink-0 rounded-full text-text-secondary hover:bg-white/10"
|
||||
:aria-label="$t('g.close')"
|
||||
@click="handleClose"
|
||||
@click="onClose"
|
||||
>
|
||||
<i class="pi pi-times text-xl" />
|
||||
</Button>
|
||||
@@ -94,28 +94,14 @@
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import { useToast } from 'primevue/usetoast'
|
||||
import { computed, ref } from 'vue'
|
||||
import { useI18n } from 'vue-i18n'
|
||||
|
||||
import Button from '@/components/ui/button/Button.vue'
|
||||
import { useBillingContext } from '@/composables/billing/useBillingContext'
|
||||
import { getComfyPlatformBaseUrl } from '@/config/comfyApi'
|
||||
import type { TierKey } from '@/platform/cloud/subscription/constants/tierPricing'
|
||||
import { useTelemetry } from '@/platform/telemetry'
|
||||
import type { BillingCycle } from '@/platform/cloud/subscription/utils/subscriptionTierRank'
|
||||
import type { PreviewSubscribeResponse } from '@/platform/workspace/api/workspaceApi'
|
||||
import { workspaceApi } from '@/platform/workspace/api/workspaceApi'
|
||||
import { useBillingOperationStore } from '@/platform/workspace/stores/billingOperationStore'
|
||||
import type { SubscriptionDialogReason } from '@/platform/cloud/subscription/composables/useSubscriptionDialog'
|
||||
import { useSubscriptionCheckout } from '@/platform/workspace/composables/useSubscriptionCheckout'
|
||||
|
||||
import PricingTableWorkspace from './PricingTableWorkspace.vue'
|
||||
import SubscriptionAddPaymentPreviewWorkspace from './SubscriptionAddPaymentPreviewWorkspace.vue'
|
||||
import SubscriptionTransitionPreviewWorkspace from './SubscriptionTransitionPreviewWorkspace.vue'
|
||||
|
||||
type CheckoutStep = 'pricing' | 'preview'
|
||||
type CheckoutTierKey = Exclude<TierKey, 'free' | 'founder'>
|
||||
|
||||
const { onClose, reason } = defineProps<{
|
||||
onClose: () => void
|
||||
reason?: SubscriptionDialogReason
|
||||
@@ -125,227 +111,22 @@ const emit = defineEmits<{
|
||||
close: [subscribed: boolean]
|
||||
}>()
|
||||
|
||||
const { t } = useI18n()
|
||||
const toast = useToast()
|
||||
const { subscribe, previewSubscribe, plans, fetchStatus, fetchBalance } =
|
||||
useBillingContext()
|
||||
const telemetry = useTelemetry()
|
||||
const billingOperationStore = useBillingOperationStore()
|
||||
const isPolling = computed(() => billingOperationStore.hasPendingOperations)
|
||||
|
||||
const checkoutStep = ref<CheckoutStep>('pricing')
|
||||
const isLoadingPreview = ref(false)
|
||||
const loadingTier = ref<CheckoutTierKey | null>(null)
|
||||
const isSubscribing = ref(false)
|
||||
const isResubscribing = ref(false)
|
||||
const previewData = ref<PreviewSubscribeResponse | null>(null)
|
||||
const selectedTierKey = ref<CheckoutTierKey | null>(null)
|
||||
const selectedBillingCycle = ref<BillingCycle>('yearly')
|
||||
|
||||
function getApiPlanSlug(
|
||||
tierKey: CheckoutTierKey,
|
||||
billingCycle: BillingCycle
|
||||
): string | null {
|
||||
const apiDuration = billingCycle === 'yearly' ? 'ANNUAL' : 'MONTHLY'
|
||||
const apiTier = tierKey.toUpperCase()
|
||||
const plan = plans.value.find(
|
||||
(p) => p.tier === apiTier && p.duration === apiDuration
|
||||
)
|
||||
return plan?.slug ?? null
|
||||
}
|
||||
|
||||
async function handleSubscribeClick(payload: {
|
||||
tierKey: CheckoutTierKey
|
||||
billingCycle: BillingCycle
|
||||
}) {
|
||||
const { tierKey, billingCycle } = payload
|
||||
|
||||
isLoadingPreview.value = true
|
||||
loadingTier.value = tierKey
|
||||
selectedTierKey.value = tierKey
|
||||
selectedBillingCycle.value = billingCycle
|
||||
|
||||
try {
|
||||
const planSlug = getApiPlanSlug(tierKey, billingCycle)
|
||||
if (!planSlug) {
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Unable to subscribe',
|
||||
detail: 'This plan is not available'
|
||||
})
|
||||
return
|
||||
}
|
||||
const response = await previewSubscribe(planSlug)
|
||||
|
||||
if (!response || !response.allowed) {
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Unable to subscribe',
|
||||
detail: response?.reason || 'This plan is not available'
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
previewData.value = response
|
||||
checkoutStep.value = 'preview'
|
||||
} catch (error) {
|
||||
const message =
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: 'Failed to load subscription preview'
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: message
|
||||
})
|
||||
} finally {
|
||||
isLoadingPreview.value = false
|
||||
loadingTier.value = null
|
||||
}
|
||||
}
|
||||
|
||||
function handleBackToPricing() {
|
||||
checkoutStep.value = 'pricing'
|
||||
previewData.value = null
|
||||
}
|
||||
|
||||
async function handleAddCreditCard() {
|
||||
if (!selectedTierKey.value) return
|
||||
|
||||
isSubscribing.value = true
|
||||
try {
|
||||
const planSlug = getApiPlanSlug(
|
||||
selectedTierKey.value,
|
||||
selectedBillingCycle.value
|
||||
)
|
||||
if (!planSlug) return
|
||||
const response = await subscribe(
|
||||
planSlug,
|
||||
`${getComfyPlatformBaseUrl()}/payment/success`,
|
||||
`${getComfyPlatformBaseUrl()}/payment/failed`
|
||||
)
|
||||
|
||||
if (!response) return
|
||||
|
||||
if (response.status === 'subscribed') {
|
||||
telemetry?.trackMonthlySubscriptionSucceeded()
|
||||
toast.add({
|
||||
severity: 'success',
|
||||
summary: t('subscription.required.pollingSuccess'),
|
||||
life: 5000
|
||||
})
|
||||
await Promise.all([fetchStatus(), fetchBalance()])
|
||||
emit('close', true)
|
||||
} else if (
|
||||
response.status === 'needs_payment_method' &&
|
||||
response.payment_method_url
|
||||
) {
|
||||
window.open(response.payment_method_url, '_blank')
|
||||
billingOperationStore.startOperation(
|
||||
response.billing_op_id,
|
||||
'subscription'
|
||||
)
|
||||
} else if (response.status === 'pending_payment') {
|
||||
billingOperationStore.startOperation(
|
||||
response.billing_op_id,
|
||||
'subscription'
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
const message =
|
||||
error instanceof Error ? error.message : 'Failed to subscribe'
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: message
|
||||
})
|
||||
} finally {
|
||||
isSubscribing.value = false
|
||||
}
|
||||
}
|
||||
|
||||
async function handleConfirmTransition() {
|
||||
if (!selectedTierKey.value) return
|
||||
|
||||
isSubscribing.value = true
|
||||
try {
|
||||
const planSlug = getApiPlanSlug(
|
||||
selectedTierKey.value,
|
||||
selectedBillingCycle.value
|
||||
)
|
||||
if (!planSlug) return
|
||||
const response = await subscribe(
|
||||
planSlug,
|
||||
`${getComfyPlatformBaseUrl()}/payment/success`,
|
||||
`${getComfyPlatformBaseUrl()}/payment/failed`
|
||||
)
|
||||
|
||||
if (!response) return
|
||||
|
||||
if (response.status === 'subscribed') {
|
||||
telemetry?.trackMonthlySubscriptionSucceeded()
|
||||
toast.add({
|
||||
severity: 'success',
|
||||
summary: t('subscription.required.pollingSuccess'),
|
||||
life: 5000
|
||||
})
|
||||
await Promise.all([fetchStatus(), fetchBalance()])
|
||||
emit('close', true)
|
||||
} else if (
|
||||
response.status === 'needs_payment_method' &&
|
||||
response.payment_method_url
|
||||
) {
|
||||
window.open(response.payment_method_url, '_blank')
|
||||
billingOperationStore.startOperation(
|
||||
response.billing_op_id,
|
||||
'subscription'
|
||||
)
|
||||
} else if (response.status === 'pending_payment') {
|
||||
billingOperationStore.startOperation(
|
||||
response.billing_op_id,
|
||||
'subscription'
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
const message =
|
||||
error instanceof Error ? error.message : 'Failed to update subscription'
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: message
|
||||
})
|
||||
} finally {
|
||||
isSubscribing.value = false
|
||||
}
|
||||
}
|
||||
|
||||
async function handleResubscribe() {
|
||||
isResubscribing.value = true
|
||||
try {
|
||||
await workspaceApi.resubscribe()
|
||||
toast.add({
|
||||
severity: 'success',
|
||||
summary: t('subscription.resubscribeSuccess'),
|
||||
life: 5000
|
||||
})
|
||||
await Promise.all([fetchStatus(), fetchBalance()])
|
||||
emit('close', true)
|
||||
} catch (error) {
|
||||
const message =
|
||||
error instanceof Error ? error.message : 'Failed to resubscribe'
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: message
|
||||
})
|
||||
} finally {
|
||||
isResubscribing.value = false
|
||||
}
|
||||
}
|
||||
|
||||
function handleClose() {
|
||||
onClose()
|
||||
}
|
||||
const {
|
||||
checkoutStep,
|
||||
isLoadingPreview,
|
||||
loadingTier,
|
||||
isSubscribing,
|
||||
isResubscribing,
|
||||
previewData,
|
||||
selectedTierKey,
|
||||
selectedBillingCycle,
|
||||
isPolling,
|
||||
handleSubscribeClick,
|
||||
handleBackToPricing,
|
||||
handleAddCreditCard,
|
||||
handleConfirmTransition,
|
||||
handleResubscribe
|
||||
} = useSubscriptionCheckout(emit)
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
|
||||
@@ -0,0 +1,369 @@
|
||||
import { createTestingPinia } from '@pinia/testing'
|
||||
import { setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { computed } from 'vue'
|
||||
|
||||
import type { Plan } from '@/platform/workspace/api/workspaceApi'
|
||||
|
||||
import { findPlanSlug } from './useSubscriptionCheckout'
|
||||
|
||||
function makeStandardYearly(): Plan {
|
||||
return {
|
||||
slug: 'standard-yearly',
|
||||
tier: 'STANDARD',
|
||||
duration: 'ANNUAL',
|
||||
price_cents: 1600,
|
||||
credits_cents: 4200,
|
||||
max_seats: 1,
|
||||
availability: { available: true },
|
||||
seat_summary: {
|
||||
seat_count: 1,
|
||||
total_cost_cents: 1600,
|
||||
total_credits_cents: 4200
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function makeCreatorMonthly(): Plan {
|
||||
return {
|
||||
slug: 'creator-monthly',
|
||||
tier: 'CREATOR',
|
||||
duration: 'MONTHLY',
|
||||
price_cents: 3500,
|
||||
credits_cents: 7400,
|
||||
max_seats: 5,
|
||||
availability: { available: true },
|
||||
seat_summary: {
|
||||
seat_count: 1,
|
||||
total_cost_cents: 3500,
|
||||
total_credits_cents: 7400
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function allPlans(): Plan[] {
|
||||
return [makeStandardYearly(), makeCreatorMonthly()]
|
||||
}
|
||||
|
||||
describe('findPlanSlug', () => {
|
||||
it('finds an annual plan by tier key and yearly billing cycle', () => {
|
||||
expect(findPlanSlug(allPlans(), 'standard', 'yearly')).toBe(
|
||||
'standard-yearly'
|
||||
)
|
||||
})
|
||||
|
||||
it('finds a monthly plan by tier key and monthly billing cycle', () => {
|
||||
expect(findPlanSlug(allPlans(), 'creator', 'monthly')).toBe(
|
||||
'creator-monthly'
|
||||
)
|
||||
})
|
||||
|
||||
it('returns null when no plan matches', () => {
|
||||
expect(findPlanSlug(allPlans(), 'standard', 'monthly')).toBeNull()
|
||||
})
|
||||
|
||||
it('returns null for empty plans', () => {
|
||||
expect(findPlanSlug([], 'standard', 'yearly')).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
const {
|
||||
mockSubscribe,
|
||||
mockPreviewSubscribe,
|
||||
mockFetchStatus,
|
||||
mockFetchBalance,
|
||||
mockPlans,
|
||||
mockResubscribe,
|
||||
mockToastAdd
|
||||
} = vi.hoisted(() => ({
|
||||
mockSubscribe: vi.fn(),
|
||||
mockPreviewSubscribe: vi.fn(),
|
||||
mockFetchStatus: vi.fn(),
|
||||
mockFetchBalance: vi.fn(),
|
||||
mockPlans: { value: [] as Plan[] },
|
||||
mockResubscribe: vi.fn(),
|
||||
mockToastAdd: vi.fn()
|
||||
}))
|
||||
|
||||
vi.mock('@/composables/billing/useBillingContext', () => ({
|
||||
useBillingContext: () => ({
|
||||
subscribe: mockSubscribe,
|
||||
previewSubscribe: mockPreviewSubscribe,
|
||||
plans: computed(() => mockPlans.value),
|
||||
fetchStatus: mockFetchStatus,
|
||||
fetchBalance: mockFetchBalance
|
||||
})
|
||||
}))
|
||||
|
||||
vi.mock('@/platform/workspace/api/workspaceApi', () => ({
|
||||
workspaceApi: { resubscribe: mockResubscribe }
|
||||
}))
|
||||
|
||||
vi.mock('@/config/comfyApi', () => ({
|
||||
getComfyPlatformBaseUrl: () => 'https://platform.comfy.org'
|
||||
}))
|
||||
|
||||
vi.mock('primevue/usetoast', () => ({
|
||||
useToast: () => ({ add: mockToastAdd })
|
||||
}))
|
||||
|
||||
vi.mock('@/platform/telemetry', () => ({
|
||||
useTelemetry: () => ({ trackMonthlySubscriptionSucceeded: vi.fn() })
|
||||
}))
|
||||
|
||||
vi.mock('vue-i18n', async (importOriginal) => {
|
||||
const actual = await importOriginal()
|
||||
return {
|
||||
...(actual as Record<string, unknown>),
|
||||
useI18n: () => ({
|
||||
t: (key: string) => key
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
describe('useSubscriptionCheckout', () => {
|
||||
let emit: ReturnType<typeof vi.fn>
|
||||
|
||||
async function setup() {
|
||||
const { useSubscriptionCheckout } =
|
||||
await import('./useSubscriptionCheckout')
|
||||
return useSubscriptionCheckout(emit as never)
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
setActivePinia(createTestingPinia({ stubActions: false }))
|
||||
vi.clearAllMocks()
|
||||
mockPlans.value = allPlans()
|
||||
emit = vi.fn()
|
||||
})
|
||||
|
||||
describe('handleSubscribeClick', () => {
|
||||
it('transitions to preview on successful preview', async () => {
|
||||
const checkout = await setup()
|
||||
const preview = {
|
||||
allowed: true,
|
||||
transition_type: 'new_subscription' as const,
|
||||
effective_at: '2025-01-01',
|
||||
is_immediate: true,
|
||||
cost_today_cents: 1600,
|
||||
cost_next_period_cents: 1600,
|
||||
credits_today_cents: 4200,
|
||||
credits_next_period_cents: 4200,
|
||||
new_plan: makeStandardYearly().seat_summary
|
||||
}
|
||||
mockPreviewSubscribe.mockResolvedValueOnce(preview)
|
||||
|
||||
await checkout.handleSubscribeClick({
|
||||
tierKey: 'standard',
|
||||
billingCycle: 'yearly'
|
||||
})
|
||||
|
||||
expect(checkout.checkoutStep.value).toBe('preview')
|
||||
expect(checkout.previewData.value).toStrictEqual(preview)
|
||||
})
|
||||
|
||||
it('shows error toast when preview is disallowed', async () => {
|
||||
const checkout = await setup()
|
||||
mockPreviewSubscribe.mockResolvedValueOnce({
|
||||
allowed: false,
|
||||
reason: 'Not allowed'
|
||||
})
|
||||
|
||||
await checkout.handleSubscribeClick({
|
||||
tierKey: 'standard',
|
||||
billingCycle: 'yearly'
|
||||
})
|
||||
|
||||
expect(checkout.checkoutStep.value).toBe('pricing')
|
||||
expect(mockToastAdd).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
severity: 'error',
|
||||
detail: 'Not allowed'
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('shows error toast when plan slug is not found', async () => {
|
||||
const checkout = await setup()
|
||||
mockPlans.value = []
|
||||
|
||||
await checkout.handleSubscribeClick({
|
||||
tierKey: 'standard',
|
||||
billingCycle: 'yearly'
|
||||
})
|
||||
|
||||
expect(mockToastAdd).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
severity: 'error',
|
||||
detail: 'This plan is not available'
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('shows error toast on network failure', async () => {
|
||||
const checkout = await setup()
|
||||
mockPreviewSubscribe.mockRejectedValueOnce(new Error('Network error'))
|
||||
|
||||
await checkout.handleSubscribeClick({
|
||||
tierKey: 'standard',
|
||||
billingCycle: 'yearly'
|
||||
})
|
||||
|
||||
expect(mockToastAdd).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
severity: 'error',
|
||||
detail: 'Network error'
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('resolves monthly billing cycle to correct plan slug', async () => {
|
||||
const checkout = await setup()
|
||||
mockPreviewSubscribe.mockResolvedValueOnce({
|
||||
allowed: true,
|
||||
transition_type: 'new_subscription'
|
||||
})
|
||||
|
||||
await checkout.handleSubscribeClick({
|
||||
tierKey: 'creator',
|
||||
billingCycle: 'monthly'
|
||||
})
|
||||
|
||||
expect(mockPreviewSubscribe).toHaveBeenCalledWith('creator-monthly')
|
||||
})
|
||||
})
|
||||
|
||||
describe('handleBackToPricing', () => {
|
||||
it('resets to pricing step and clears preview data', async () => {
|
||||
const checkout = await setup()
|
||||
checkout.checkoutStep.value = 'preview'
|
||||
checkout.previewData.value = {} as never
|
||||
|
||||
checkout.handleBackToPricing()
|
||||
|
||||
expect(checkout.checkoutStep.value).toBe('pricing')
|
||||
expect(checkout.previewData.value).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
describe('handleAddCreditCard', () => {
|
||||
it('emits close on subscribed status', async () => {
|
||||
const checkout = await setup()
|
||||
checkout.selectedTierKey.value = 'standard'
|
||||
checkout.selectedBillingCycle.value = 'yearly'
|
||||
mockSubscribe.mockResolvedValueOnce({
|
||||
status: 'subscribed',
|
||||
billing_op_id: 'op-1'
|
||||
})
|
||||
mockFetchStatus.mockResolvedValueOnce(undefined)
|
||||
mockFetchBalance.mockResolvedValueOnce(undefined)
|
||||
|
||||
await checkout.handleAddCreditCard()
|
||||
|
||||
expect(mockSubscribe).toHaveBeenCalledWith(
|
||||
'standard-yearly',
|
||||
'https://platform.comfy.org/payment/success',
|
||||
'https://platform.comfy.org/payment/failed'
|
||||
)
|
||||
expect(emit).toHaveBeenCalledWith('close', true)
|
||||
})
|
||||
|
||||
it('opens payment URL when needs_payment_method', async () => {
|
||||
const checkout = await setup()
|
||||
checkout.selectedTierKey.value = 'standard'
|
||||
checkout.selectedBillingCycle.value = 'yearly'
|
||||
mockSubscribe.mockResolvedValueOnce({
|
||||
status: 'needs_payment_method',
|
||||
billing_op_id: 'op-2',
|
||||
payment_method_url: 'https://stripe.com/pay'
|
||||
})
|
||||
|
||||
const openSpy = vi.spyOn(window, 'open').mockImplementation(() => null)
|
||||
await checkout.handleAddCreditCard()
|
||||
|
||||
expect(openSpy).toHaveBeenCalledWith('https://stripe.com/pay', '_blank')
|
||||
openSpy.mockRestore()
|
||||
})
|
||||
|
||||
it('shows error toast on subscribe failure', async () => {
|
||||
const checkout = await setup()
|
||||
checkout.selectedTierKey.value = 'standard'
|
||||
checkout.selectedBillingCycle.value = 'yearly'
|
||||
mockSubscribe.mockRejectedValueOnce(new Error('Payment failed'))
|
||||
|
||||
await checkout.handleAddCreditCard()
|
||||
|
||||
expect(mockToastAdd).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
severity: 'error',
|
||||
detail: 'Payment failed'
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('handleConfirmTransition', () => {
|
||||
it('emits close on subscribed status', async () => {
|
||||
const checkout = await setup()
|
||||
checkout.selectedTierKey.value = 'standard'
|
||||
checkout.selectedBillingCycle.value = 'yearly'
|
||||
mockSubscribe.mockResolvedValueOnce({
|
||||
status: 'subscribed',
|
||||
billing_op_id: 'op-3'
|
||||
})
|
||||
mockFetchStatus.mockResolvedValueOnce(undefined)
|
||||
mockFetchBalance.mockResolvedValueOnce(undefined)
|
||||
|
||||
await checkout.handleConfirmTransition()
|
||||
|
||||
expect(emit).toHaveBeenCalledWith('close', true)
|
||||
})
|
||||
|
||||
it('shows error toast on failure', async () => {
|
||||
const checkout = await setup()
|
||||
checkout.selectedTierKey.value = 'standard'
|
||||
checkout.selectedBillingCycle.value = 'yearly'
|
||||
mockSubscribe.mockRejectedValueOnce(new Error('Transition error'))
|
||||
|
||||
await checkout.handleConfirmTransition()
|
||||
|
||||
expect(mockToastAdd).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
severity: 'error',
|
||||
detail: 'Transition error'
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('handleResubscribe', () => {
|
||||
it('emits close on success', async () => {
|
||||
const checkout = await setup()
|
||||
mockResubscribe.mockResolvedValueOnce({
|
||||
billing_op_id: 'op-4',
|
||||
status: 'active'
|
||||
})
|
||||
mockFetchStatus.mockResolvedValueOnce(undefined)
|
||||
mockFetchBalance.mockResolvedValueOnce(undefined)
|
||||
|
||||
await checkout.handleResubscribe()
|
||||
|
||||
expect(mockResubscribe).toHaveBeenCalled()
|
||||
expect(emit).toHaveBeenCalledWith('close', true)
|
||||
})
|
||||
|
||||
it('shows error toast on failure', async () => {
|
||||
const checkout = await setup()
|
||||
mockResubscribe.mockRejectedValueOnce(new Error('Resubscribe failed'))
|
||||
|
||||
await checkout.handleResubscribe()
|
||||
|
||||
expect(mockToastAdd).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
severity: 'error',
|
||||
detail: 'Resubscribe failed'
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
210
src/platform/workspace/composables/useSubscriptionCheckout.ts
Normal file
210
src/platform/workspace/composables/useSubscriptionCheckout.ts
Normal file
@@ -0,0 +1,210 @@
|
||||
import { useToast } from 'primevue/usetoast'
|
||||
import { computed, ref } from 'vue'
|
||||
import { useI18n } from 'vue-i18n'
|
||||
|
||||
import { useBillingContext } from '@/composables/billing/useBillingContext'
|
||||
import { getComfyPlatformBaseUrl } from '@/config/comfyApi'
|
||||
import type { TierKey } from '@/platform/cloud/subscription/constants/tierPricing'
|
||||
import type { BillingCycle } from '@/platform/cloud/subscription/utils/subscriptionTierRank'
|
||||
import { useTelemetry } from '@/platform/telemetry'
|
||||
import type {
|
||||
Plan,
|
||||
PreviewSubscribeResponse
|
||||
} from '@/platform/workspace/api/workspaceApi'
|
||||
import { workspaceApi } from '@/platform/workspace/api/workspaceApi'
|
||||
import { useBillingOperationStore } from '@/platform/workspace/stores/billingOperationStore'
|
||||
|
||||
type CheckoutStep = 'pricing' | 'preview'
|
||||
type CheckoutTierKey = Exclude<TierKey, 'free' | 'founder'>
|
||||
|
||||
export function findPlanSlug(
|
||||
plans: Plan[],
|
||||
tierKey: CheckoutTierKey,
|
||||
billingCycle: BillingCycle
|
||||
): string | null {
|
||||
const apiDuration = billingCycle === 'yearly' ? 'ANNUAL' : 'MONTHLY'
|
||||
const apiTier = tierKey.toUpperCase()
|
||||
const plan = plans.find(
|
||||
(p) => p.tier === apiTier && p.duration === apiDuration
|
||||
)
|
||||
return plan?.slug ?? null
|
||||
}
|
||||
|
||||
export function useSubscriptionCheckout(emit: {
|
||||
(e: 'close', subscribed: boolean): void
|
||||
}) {
|
||||
const { t } = useI18n()
|
||||
const toast = useToast()
|
||||
const { subscribe, previewSubscribe, plans, fetchStatus, fetchBalance } =
|
||||
useBillingContext()
|
||||
const telemetry = useTelemetry()
|
||||
const billingOperationStore = useBillingOperationStore()
|
||||
|
||||
const checkoutStep = ref<CheckoutStep>('pricing')
|
||||
const isLoadingPreview = ref(false)
|
||||
const loadingTier = ref<CheckoutTierKey | null>(null)
|
||||
const isSubscribing = ref(false)
|
||||
const isResubscribing = ref(false)
|
||||
const previewData = ref<PreviewSubscribeResponse | null>(null)
|
||||
const selectedTierKey = ref<CheckoutTierKey | null>(null)
|
||||
const selectedBillingCycle = ref<BillingCycle>('yearly')
|
||||
const isPolling = computed(() => billingOperationStore.hasPendingOperations)
|
||||
|
||||
function getApiPlanSlug(
|
||||
tierKey: CheckoutTierKey,
|
||||
billingCycle: BillingCycle
|
||||
): string | null {
|
||||
return findPlanSlug(plans.value, tierKey, billingCycle)
|
||||
}
|
||||
|
||||
async function handleSubscribeClick(payload: {
|
||||
tierKey: CheckoutTierKey
|
||||
billingCycle: BillingCycle
|
||||
}) {
|
||||
const { tierKey, billingCycle } = payload
|
||||
|
||||
isLoadingPreview.value = true
|
||||
loadingTier.value = tierKey
|
||||
selectedTierKey.value = tierKey
|
||||
selectedBillingCycle.value = billingCycle
|
||||
|
||||
try {
|
||||
const planSlug = getApiPlanSlug(tierKey, billingCycle)
|
||||
if (!planSlug) {
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Unable to subscribe',
|
||||
detail: 'This plan is not available'
|
||||
})
|
||||
return
|
||||
}
|
||||
const response = await previewSubscribe(planSlug)
|
||||
|
||||
if (!response || !response.allowed) {
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Unable to subscribe',
|
||||
detail: response?.reason || 'This plan is not available'
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
previewData.value = response
|
||||
checkoutStep.value = 'preview'
|
||||
} catch (error) {
|
||||
const message =
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: 'Failed to load subscription preview'
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: message
|
||||
})
|
||||
} finally {
|
||||
isLoadingPreview.value = false
|
||||
loadingTier.value = null
|
||||
}
|
||||
}
|
||||
|
||||
function handleBackToPricing() {
|
||||
checkoutStep.value = 'pricing'
|
||||
previewData.value = null
|
||||
}
|
||||
|
||||
async function handleSubscription() {
|
||||
if (!selectedTierKey.value) return
|
||||
|
||||
isSubscribing.value = true
|
||||
try {
|
||||
const planSlug = getApiPlanSlug(
|
||||
selectedTierKey.value,
|
||||
selectedBillingCycle.value
|
||||
)
|
||||
if (!planSlug) return
|
||||
const response = await subscribe(
|
||||
planSlug,
|
||||
`${getComfyPlatformBaseUrl()}/payment/success`,
|
||||
`${getComfyPlatformBaseUrl()}/payment/failed`
|
||||
)
|
||||
|
||||
if (!response) return
|
||||
|
||||
if (response.status === 'subscribed') {
|
||||
telemetry?.trackMonthlySubscriptionSucceeded()
|
||||
toast.add({
|
||||
severity: 'success',
|
||||
summary: t('subscription.required.pollingSuccess'),
|
||||
life: 5000
|
||||
})
|
||||
await Promise.all([fetchStatus(), fetchBalance()])
|
||||
emit('close', true)
|
||||
} else if (
|
||||
response.status === 'needs_payment_method' &&
|
||||
response.payment_method_url
|
||||
) {
|
||||
window.open(response.payment_method_url, '_blank')
|
||||
billingOperationStore.startOperation(
|
||||
response.billing_op_id,
|
||||
'subscription'
|
||||
)
|
||||
} else if (response.status === 'pending_payment') {
|
||||
billingOperationStore.startOperation(
|
||||
response.billing_op_id,
|
||||
'subscription'
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
const message =
|
||||
error instanceof Error ? error.message : 'Failed to subscribe'
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: message
|
||||
})
|
||||
} finally {
|
||||
isSubscribing.value = false
|
||||
}
|
||||
}
|
||||
|
||||
async function handleResubscribe() {
|
||||
isResubscribing.value = true
|
||||
try {
|
||||
await workspaceApi.resubscribe()
|
||||
toast.add({
|
||||
severity: 'success',
|
||||
summary: t('subscription.resubscribeSuccess'),
|
||||
life: 5000
|
||||
})
|
||||
await Promise.all([fetchStatus(), fetchBalance()])
|
||||
emit('close', true)
|
||||
} catch (error) {
|
||||
const message =
|
||||
error instanceof Error ? error.message : 'Failed to resubscribe'
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: message
|
||||
})
|
||||
} finally {
|
||||
isResubscribing.value = false
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
checkoutStep,
|
||||
isLoadingPreview,
|
||||
loadingTier,
|
||||
isSubscribing,
|
||||
isResubscribing,
|
||||
previewData,
|
||||
selectedTierKey,
|
||||
selectedBillingCycle,
|
||||
isPolling,
|
||||
handleSubscribeClick,
|
||||
handleBackToPricing,
|
||||
handleAddCreditCard: handleSubscription,
|
||||
handleConfirmTransition: handleSubscription,
|
||||
handleResubscribe
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,272 @@
|
||||
import { createTestingPinia } from '@pinia/testing'
|
||||
import userEvent from '@testing-library/user-event'
|
||||
import { render, screen, waitFor } from '@testing-library/vue'
|
||||
import axios, { AxiosError, AxiosHeaders } from 'axios'
|
||||
import type * as AxiosModule from 'axios'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { createI18n } from 'vue-i18n'
|
||||
|
||||
import RichComboWidget from '@/renderer/extensions/vueNodes/widgets/components/RichComboWidget.vue'
|
||||
import type { ComboInputSpec } from '@/schemas/nodeDef/nodeDefSchemaV2'
|
||||
import type {
|
||||
RemoteComboConfig,
|
||||
RemoteItemSchema
|
||||
} from '@/schemas/nodeDefSchema'
|
||||
import type { SimplifiedWidget } from '@/types/simplifiedWidget'
|
||||
|
||||
import { createMockWidget } from './widgetTestUtils'
|
||||
|
||||
// Preserve everything axios exports — only `default.get` is the call site we
|
||||
// drive. Other modules in the import graph (e.g. workspaceApi) call
|
||||
// axios.create() at module-load time, so we can't replace the default outright.
|
||||
vi.mock('axios', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof AxiosModule>()
|
||||
return {
|
||||
...actual,
|
||||
default: { ...actual.default, get: vi.fn() }
|
||||
}
|
||||
})
|
||||
|
||||
// All four auth-related composables are mocked at module level so the SFC's
|
||||
// imports never pull in firebase / vuefire. Their return shapes only need to
|
||||
// satisfy the call sites the widget actually hits.
|
||||
vi.mock('@/composables/useFeatureFlags', () => ({
|
||||
useFeatureFlags: () => ({ flags: { teamWorkspacesEnabled: false } })
|
||||
}))
|
||||
vi.mock('@/platform/workspace/stores/workspaceAuthStore', () => ({
|
||||
useWorkspaceAuthStore: () => ({ currentWorkspace: null })
|
||||
}))
|
||||
vi.mock('@/stores/authStore', () => ({
|
||||
useAuthStore: () => ({
|
||||
userId: undefined,
|
||||
getAuthHeader: vi.fn(() => Promise.resolve(null))
|
||||
})
|
||||
}))
|
||||
vi.mock('@/stores/apiKeyAuthStore', () => ({
|
||||
useApiKeyAuthStore: () => ({ getApiKey: () => null })
|
||||
}))
|
||||
|
||||
// Empty-message i18n instance: with no messages, t() echoes the key itself,
// which is exactly what the tests assert on (e.g. 'widgets.remoteCombo.loadFailed').
const i18n = createI18n({ legacy: false, locale: 'en', messages: { en: {} } })

// Minimal stub: surfaces the props the widget binds (so we can assert on them)
// and exposes click affordances that emit `update:selected` for the user-action
// tests. The real FormDropdown's rendering is tested in its own suite.
const FormDropdownStub = {
  name: 'FormDropdown',
  props: [
    'selected',
    'items',
    'placeholder',
    'multiple',
    'showSort',
    'showLayoutSwitcher',
    'searcher',
    'layoutMode'
  ],
  emits: ['update:selected', 'update:layoutMode'],
  template: `
    <div data-testid="dropdown">
      <span data-testid="placeholder">{{ placeholder }}</span>
      <span data-testid="items-count">{{ items.length }}</span>
      <!-- One button per item: clicking selects exactly that id. -->
      <button
        v-for="item in items"
        :key="item.id"
        :data-testid="'item-' + item.id"
        @click="$emit('update:selected', new Set([item.id]))"
      >
        {{ item.name }}
      </button>
      <!-- Emits an empty selection, driving the deselect/B1-regression path. -->
      <button
        data-testid="deselect"
        @click="$emit('update:selected', new Set())"
      >×</button>
    </div>
  `
}
|
||||
|
||||
const baseSchema: RemoteItemSchema = {
|
||||
value_field: 'id',
|
||||
label_field: 'name',
|
||||
preview_type: 'image'
|
||||
}
|
||||
|
||||
function buildWidget(
|
||||
remoteCombo: Partial<Omit<RemoteComboConfig, 'route' | 'item_schema'>> = {},
|
||||
value: string | undefined = undefined
|
||||
): SimplifiedWidget<string | undefined> {
|
||||
const spec: ComboInputSpec = {
|
||||
type: 'COMBO',
|
||||
name: 'voice',
|
||||
remote_combo: {
|
||||
route: '/voices',
|
||||
item_schema: baseSchema,
|
||||
...remoteCombo
|
||||
}
|
||||
}
|
||||
return createMockWidget<string | undefined>({
|
||||
name: 'voice',
|
||||
type: 'COMBO',
|
||||
value,
|
||||
spec
|
||||
})
|
||||
}
|
||||
|
||||
function renderWidget(
|
||||
widget: SimplifiedWidget<string | undefined>,
|
||||
modelValue: string | undefined = undefined
|
||||
) {
|
||||
return render(RichComboWidget, {
|
||||
props: {
|
||||
widget,
|
||||
modelValue: modelValue ?? widget.value
|
||||
},
|
||||
global: {
|
||||
plugins: [createTestingPinia(), i18n],
|
||||
stubs: { FormDropdown: FormDropdownStub }
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function mockAxiosResponseOnce(data: unknown) {
|
||||
vi.mocked(axios.get).mockResolvedValueOnce({ data })
|
||||
}
|
||||
|
||||
function mockAxiosErrorOnce(status: number) {
|
||||
vi.mocked(axios.get).mockRejectedValueOnce(
|
||||
new AxiosError(`HTTP ${status}`, 'ERR_BAD_RESPONSE', undefined, undefined, {
|
||||
status,
|
||||
statusText: '',
|
||||
headers: {},
|
||||
config: { headers: new AxiosHeaders() },
|
||||
data: null
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
function mockAxiosNetworkErrorOnce() {
|
||||
vi.mocked(axios.get).mockRejectedValueOnce(
|
||||
new AxiosError('Network Error', 'ERR_NETWORK')
|
||||
)
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
// Cache API isn't in happy-dom by default. Stub a no-op cache so getCached
|
||||
// always returns null (forces a fetch) and setCache/clearCache resolve.
|
||||
vi.stubGlobal('caches', {
|
||||
open: vi.fn(() =>
|
||||
Promise.resolve({
|
||||
match: vi.fn(() => Promise.resolve(undefined)),
|
||||
put: vi.fn(() => Promise.resolve()),
|
||||
delete: vi.fn(() => Promise.resolve(true))
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
vi.unstubAllGlobals()
|
||||
})
|
||||
|
||||
// End-to-end behavior of the widget against a mocked axios transport and a
// stubbed FormDropdown. Placeholder assertions compare against i18n keys
// because the test i18n instance has no messages (t() echoes the key).
describe('RichComboWidget', () => {
  it('mounts, fetches, and renders the items returned from the route', async () => {
    mockAxiosResponseOnce([
      { id: 'a', name: 'Alice' },
      { id: 'b', name: 'Bob' }
    ])

    renderWidget(buildWidget())

    await waitFor(() =>
      expect(screen.getByTestId('items-count').textContent).toBe('2')
    )
    expect(screen.getByText('Alice')).toBeTruthy()
    expect(screen.getByText('Bob')).toBeTruthy()
    // Exactly one network round-trip for a plain mount.
    expect(vi.mocked(axios.get)).toHaveBeenCalledTimes(1)
  })

  it('shows the load-failed placeholder on a non-retriable 404 without retrying', async () => {
    mockAxiosErrorOnce(404)

    renderWidget(buildWidget())

    await waitFor(() =>
      expect(screen.getByTestId('placeholder').textContent).toBe(
        'widgets.remoteCombo.loadFailed'
      )
    )
    // 404 is non-retriable, so only the initial call must have happened.
    expect(vi.mocked(axios.get)).toHaveBeenCalledTimes(1)
  })

  it('shows the load-failed placeholder when retries are exhausted', async () => {
    // max_retries=1 lets us assert exhaustion without sleeping through the
    // exponential backoff (`attempts++` then `attempts >= maxRetries` breaks
    // before any setTimeout call).
    mockAxiosNetworkErrorOnce()

    renderWidget(buildWidget({ max_retries: 1 }))

    await waitFor(() =>
      expect(screen.getByTestId('placeholder').textContent).toBe(
        'widgets.remoteCombo.loadFailed'
      )
    )
    expect(vi.mocked(axios.get)).toHaveBeenCalledTimes(1)
  })

  it('refetches when the refresh button is clicked', async () => {
    mockAxiosResponseOnce([{ id: 'a', name: 'Alice' }])

    renderWidget(buildWidget())

    await waitFor(() =>
      expect(screen.getByTestId('items-count').textContent).toBe('1')
    )

    // Queue a different payload so the refetch is observable in the item count.
    mockAxiosResponseOnce([
      { id: 'a', name: 'Alice' },
      { id: 'b', name: 'Bob' }
    ])
    await userEvent.click(screen.getByLabelText('g.refresh'))

    await waitFor(() =>
      expect(screen.getByTestId('items-count').textContent).toBe('2')
    )
    expect(vi.mocked(axios.get)).toHaveBeenCalledTimes(2)
  })

  it('clears modelValue to undefined when the selected item is toggled off (B1 regression)', async () => {
    mockAxiosResponseOnce([{ id: 'a', name: 'Alice' }])

    const { emitted } = renderWidget(buildWidget(), 'a')

    expect(await screen.findByTestId('item-a')).toBeTruthy()

    // The stub's deselect button emits an empty selection set.
    await userEvent.click(screen.getByTestId('deselect'))

    const updates = emitted('update:modelValue')
    expect(updates).toBeTruthy()
    expect(updates!.at(-1)).toEqual([undefined])
  })

  it('preserves a stale modelValue when the fetched items do not contain that id', async () => {
    mockAxiosResponseOnce([
      { id: 'a', name: 'Alice' },
      { id: 'b', name: 'Bob' }
    ])

    const { emitted } = renderWidget(buildWidget(), 'stale-id')

    await waitFor(() =>
      expect(screen.getByTestId('items-count').textContent).toBe('2')
    )

    // The selection sync watcher only mutates the internal selectedSet — it
    // never writes to modelValue, so the stale id round-trips intact when the
    // workflow is later saved.
    expect(emitted('update:modelValue')).toBeFalsy()
    expect(screen.getByTestId('placeholder').textContent).toBe(
      'widgets.uploadSelect.placeholder'
    )
  })
})
|
||||
@@ -0,0 +1,345 @@
|
||||
<script setup lang="ts">
|
||||
import { computed, onMounted, onUnmounted, provide, ref, watch } from 'vue'
|
||||
import { useI18n } from 'vue-i18n'
|
||||
|
||||
import { useFeatureFlags } from '@/composables/useFeatureFlags'
|
||||
import { useWorkspaceAuthStore } from '@/platform/workspace/stores/workspaceAuthStore'
|
||||
import { isComboInputSpec } from '@/schemas/nodeDef/nodeDefSchemaV2'
|
||||
import type {
|
||||
RemoteComboConfig,
|
||||
RemoteItemSchema
|
||||
} from '@/schemas/nodeDefSchema'
|
||||
import { useApiKeyAuthStore } from '@/stores/apiKeyAuthStore'
|
||||
import { useAuthStore } from '@/stores/authStore'
|
||||
import type { SimplifiedWidget } from '@/types/simplifiedWidget'
|
||||
import { cn } from '@comfyorg/tailwind-utils'
|
||||
|
||||
import FormDropdown from './form/dropdown/FormDropdown.vue'
|
||||
import type { FormDropdownItem, LayoutMode } from './form/dropdown/types'
|
||||
import { AssetKindKey } from './form/dropdown/types'
|
||||
import {
|
||||
buildSearchText,
|
||||
extractItems,
|
||||
getByPath,
|
||||
mapToDropdownItem
|
||||
} from '../utils/itemSchemaUtils'
|
||||
import { fetchRemoteRoute } from '../utils/fetchRemoteRoute'
|
||||
import {
|
||||
buildCacheKey,
|
||||
getBackoff,
|
||||
isRetriableError,
|
||||
summarizeError,
|
||||
summarizePayload
|
||||
} from '../utils/richComboHelpers'
|
||||
|
||||
// Retry/timeout defaults, applied when the node's remote_combo config omits
// max_retries / timeout (see fetchAll).
const DEFAULT_MAX_RETRIES = 5
const DEFAULT_TIMEOUT = 30000

// --- Persistent cache using browser Cache API (survives page reloads) ---
// Single named cache shared by all remote-combo widgets; entries are keyed
// per-config and per-auth-scope (see cacheKeyFor).
const CACHE_NAME = 'comfy-remote-widget'
|
||||
|
||||
// Mirrors useAuthStore().getAuthHeader()'s priority chain so the cache is
|
||||
// partitioned by the *active* auth context, not just the firebase user.
|
||||
// Same firebase user across two workspaces, or across workspace ↔ personal,
|
||||
// would otherwise share a cache and bleed data.
|
||||
//
|
||||
// Returns an opaque, non-secret identifier. The API-key branch deliberately
|
||||
// returns a constant rather than the key value or a hash of it: hashing is
|
||||
// async (SubtleCrypto), and grouping all keys on one machine under a single
|
||||
// scope is an acceptable tradeoff for the rare key-rotation case.
|
||||
function getAuthScope(): string {
|
||||
const { flags } = useFeatureFlags()
|
||||
if (flags.teamWorkspacesEnabled) {
|
||||
const wsId = useWorkspaceAuthStore().currentWorkspace?.id
|
||||
if (wsId) return `ws:${wsId}`
|
||||
}
|
||||
const uid = useAuthStore().userId
|
||||
if (uid) return `fb:${uid}`
|
||||
return useApiKeyAuthStore().getApiKey() ? 'apikey' : 'anon'
|
||||
}
|
||||
|
||||
function cacheKeyFor(config: RemoteComboConfig): string {
|
||||
return buildCacheKey(config, getAuthScope())
|
||||
}
|
||||
|
||||
async function getCached(config: RemoteComboConfig): Promise<unknown[] | null> {
|
||||
try {
|
||||
const cache = await caches.open(CACHE_NAME)
|
||||
const resp = await cache.match(cacheKeyFor(config))
|
||||
if (!resp) return null
|
||||
const entry = await resp.json()
|
||||
const ttl = config.refresh
|
||||
if (!ttl || ttl <= 0) return entry.data
|
||||
if (Date.now() - entry.timestamp < ttl) return entry.data
|
||||
return null
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
async function clearCache(config: RemoteComboConfig) {
|
||||
try {
|
||||
const cache = await caches.open(CACHE_NAME)
|
||||
await cache.delete(cacheKeyFor(config))
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
async function setCache(config: RemoteComboConfig, data: unknown[]) {
|
||||
try {
|
||||
const cache = await caches.open(CACHE_NAME)
|
||||
const body = JSON.stringify({ data, timestamp: Date.now() })
|
||||
await cache.put(cacheKeyFor(config), new Response(body))
|
||||
} catch {
|
||||
// Cache API unavailable — widget still works, just no persistence
|
||||
}
|
||||
}
|
||||
|
||||
const { widget } = defineProps<{
|
||||
widget: SimplifiedWidget<string | undefined>
|
||||
}>()
|
||||
|
||||
const modelValue = defineModel<string>()
|
||||
|
||||
const { t } = useI18n()
|
||||
|
||||
const comboSpec = computed(() => {
|
||||
if (widget.spec && isComboInputSpec(widget.spec)) {
|
||||
return widget.spec
|
||||
}
|
||||
return undefined
|
||||
})
|
||||
const remoteConfig = computed<RemoteComboConfig | undefined>(
|
||||
() => comboSpec.value?.remote_combo
|
||||
)
|
||||
const itemSchema = computed<RemoteItemSchema | undefined>(
|
||||
() => remoteConfig.value?.item_schema
|
||||
)
|
||||
|
||||
// --- Fetch state ---
// Raw payload entries straight from the route (before item_schema mapping).
const rawItems = ref<unknown[]>([])
// True while a network fetch is in flight; drives the spinner and placeholder.
const loading = ref(false)
// Localized error text from the last failed fetch, or null on success.
const error = ref<string | null>(null)
// Controller for the active fetch; aborted when superseded or on unmount.
let abortController: AbortController | undefined
|
||||
|
||||
// --- Auto-select policy ---
|
||||
// Only sets modelValue when it's empty; never overrides an existing value
|
||||
// (valid or stale) — user intent and workflow portability are preserved.
|
||||
function applyAutoSelect(config: RemoteComboConfig) {
|
||||
if (modelValue.value) return
|
||||
|
||||
const list = items.value
|
||||
if (list.length === 0) return
|
||||
|
||||
if (config.auto_select === 'first') {
|
||||
modelValue.value = list[0].id
|
||||
} else if (config.auto_select === 'last') {
|
||||
modelValue.value = list[list.length - 1].id
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchAll(config: RemoteComboConfig) {
|
||||
const controller = abortController!
|
||||
const maxRetries = config.max_retries ?? DEFAULT_MAX_RETRIES
|
||||
loading.value = true
|
||||
error.value = null
|
||||
|
||||
let attempts = 0
|
||||
while (!controller.signal.aborted) {
|
||||
try {
|
||||
const res = await fetchRemoteRoute(config.route, {
|
||||
timeout: config.timeout ?? DEFAULT_TIMEOUT,
|
||||
signal: controller.signal
|
||||
})
|
||||
if (controller.signal.aborted) return
|
||||
const fetchedItems = extractItems(res.data, config.response_key)
|
||||
if (fetchedItems === null) {
|
||||
console.error('RichComboWidget: expected array response', {
|
||||
route: config.route,
|
||||
responseKey: config.response_key,
|
||||
received: summarizePayload(res.data)
|
||||
})
|
||||
error.value = t('widgets.remoteCombo.loadFailed')
|
||||
break
|
||||
}
|
||||
await setCache(config, fetchedItems)
|
||||
if (controller.signal.aborted) return
|
||||
rawItems.value = fetchedItems
|
||||
applyAutoSelect(config)
|
||||
break
|
||||
} catch (err: unknown) {
|
||||
if (controller.signal.aborted) return
|
||||
console.error('RichComboWidget: fetch error', {
|
||||
route: config.route,
|
||||
error: summarizeError(err)
|
||||
})
|
||||
if (!isRetriableError(err)) {
|
||||
error.value = t('widgets.remoteCombo.loadFailed')
|
||||
break
|
||||
}
|
||||
attempts++
|
||||
if (attempts >= maxRetries) {
|
||||
error.value = t('widgets.remoteCombo.loadFailed')
|
||||
break
|
||||
}
|
||||
const delay = getBackoff(attempts)
|
||||
await new Promise((resolve) => setTimeout(resolve, delay))
|
||||
}
|
||||
}
|
||||
|
||||
if (!controller.signal.aborted) {
|
||||
loading.value = false
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchItems(bypassCache = false) {
|
||||
const config = remoteConfig.value
|
||||
if (!config) return
|
||||
|
||||
// Claim the active controller before any async work so the cache-hit
|
||||
// path can bail out if a later fetchItems supersedes us.
|
||||
abortController?.abort()
|
||||
const myController = new AbortController()
|
||||
abortController = myController
|
||||
|
||||
// Check cache first (unless manual refresh)
|
||||
if (!bypassCache) {
|
||||
const cached = await getCached(config)
|
||||
if (myController.signal.aborted) return
|
||||
if (cached) {
|
||||
rawItems.value = cached
|
||||
applyAutoSelect(config)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Reset items for fresh fetch
|
||||
rawItems.value = []
|
||||
await fetchAll(config)
|
||||
}
|
||||
|
||||
onMounted(() => {
|
||||
void fetchItems()
|
||||
})
|
||||
|
||||
onUnmounted(() => {
|
||||
abortController?.abort()
|
||||
})
|
||||
|
||||
// --- Preview type ---
|
||||
const assetKind = computed(() => itemSchema.value?.preview_type ?? 'image')
|
||||
|
||||
provide(AssetKindKey, assetKind)
|
||||
|
||||
// --- Item mapping ---
|
||||
const items = computed<FormDropdownItem[]>(() => {
|
||||
const schema = itemSchema.value
|
||||
if (schema) {
|
||||
return rawItems.value.map((raw) => mapToDropdownItem(raw, schema))
|
||||
}
|
||||
return rawItems.value.map((raw) => {
|
||||
const val = String(raw ?? '')
|
||||
return { id: val, name: val }
|
||||
})
|
||||
})
|
||||
|
||||
// --- Search ---
|
||||
const searchIndex = computed(() => {
|
||||
const schema = itemSchema.value
|
||||
const fields = schema?.search_fields
|
||||
if (!schema || !fields?.length) return new Map<string, string>()
|
||||
const index = new Map<string, string>()
|
||||
for (const raw of rawItems.value) {
|
||||
const id = String(getByPath(raw, schema.value_field) ?? '')
|
||||
const text = buildSearchText(raw, fields)
|
||||
if (text) index.set(id, text)
|
||||
}
|
||||
return index
|
||||
})
|
||||
|
||||
const layoutMode = ref<LayoutMode>('list')
|
||||
const selectedSet = ref<Set<string>>(new Set())
|
||||
|
||||
async function searcher(query: string, searchItems: FormDropdownItem[]) {
|
||||
if (!query.trim()) return searchItems
|
||||
const q = query.toLowerCase()
|
||||
return searchItems.filter((item) => {
|
||||
const text = searchIndex.value.get(item.id) ?? item.name.toLowerCase()
|
||||
return text.includes(q)
|
||||
})
|
||||
}
|
||||
|
||||
// --- Selection sync ---
|
||||
watch(
|
||||
[modelValue, items],
|
||||
([val]) => {
|
||||
selectedSet.value.clear()
|
||||
if (val) {
|
||||
const item = items.value.find((i) => i.id === val)
|
||||
if (item) selectedSet.value.add(item.id)
|
||||
}
|
||||
},
|
||||
{ immediate: true }
|
||||
)
|
||||
|
||||
function handleRefresh() {
|
||||
abortController?.abort()
|
||||
error.value = null
|
||||
const config = remoteConfig.value
|
||||
// Sequence the cache delete before the refetch: otherwise the (very fast)
|
||||
// setCache from a quickly-resolved network response can land the new entry
|
||||
// before the still-pending cache.delete removes it, silently dropping the
|
||||
// freshly-cached data on the next mount.
|
||||
void (async () => {
|
||||
if (config) await clearCache(config)
|
||||
await fetchItems(true)
|
||||
})()
|
||||
}
|
||||
|
||||
function handleSelection(selected: Set<string>) {
|
||||
modelValue.value = selected.values().next().value
|
||||
}
|
||||
|
||||
const placeholder = computed(() => {
|
||||
if (loading.value) return t('widgets.remoteCombo.loading')
|
||||
if (error.value) return error.value
|
||||
return t('widgets.uploadSelect.placeholder')
|
||||
})
|
||||
</script>
|
||||
|
||||
<template>
  <!-- .stop modifiers keep pointer gestures from propagating to the host
       node — NOTE(review): presumably prevents canvas/node dragging while
       interacting with the widget; confirm against the node renderer. -->
  <div
    class="flex w-full min-w-0 items-center gap-1 rounded-lg focus-within:ring focus-within:ring-component-node-widget-background-highlighted"
    @pointerdown.stop
    @pointermove.stop
    @pointerup.stop
  >
    <FormDropdown
      v-model:selected="selectedSet"
      v-model:layout-mode="layoutMode"
      :items="items"
      :placeholder="placeholder"
      :multiple="false"
      :show-sort="false"
      :show-layout-switcher="false"
      :searcher="searcher"
      class="min-w-0 flex-1"
      @update:selected="handleSelection"
    />
    <!-- Refresh button is shown unless the config explicitly sets
         refresh_button: false. The icon spins while a fetch is in flight. -->
    <button
      v-if="remoteConfig?.refresh_button !== false"
      type="button"
      :aria-label="t('g.refresh')"
      :title="t('g.refresh')"
      class="text-secondary flex size-7 shrink-0 items-center justify-center rounded-sm hover:bg-component-node-widget-background-hovered"
      @click.stop="handleRefresh"
    >
      <i
        :class="
          cn('icon-[lucide--refresh-cw] size-3.5', loading && 'animate-spin')
        "
      />
    </button>
  </div>
</template>
|
||||
@@ -1,6 +1,7 @@
|
||||
<template>
|
||||
<RichComboWidget v-if="hasRemoteCombo" v-model="modelValue" :widget />
|
||||
<WidgetSelectDropdown
|
||||
v-if="isDropdownUIWidget"
|
||||
v-else-if="isDropdownUIWidget"
|
||||
v-model="modelValue"
|
||||
:widget
|
||||
:node-type="widget.nodeType ?? nodeType"
|
||||
@@ -24,6 +25,7 @@
|
||||
import { computed } from 'vue'
|
||||
|
||||
import { assetService } from '@/platform/assets/services/assetService'
|
||||
import RichComboWidget from '@/renderer/extensions/vueNodes/widgets/components/RichComboWidget.vue'
|
||||
import WidgetSelectDefault from '@/renderer/extensions/vueNodes/widgets/components/WidgetSelectDefault.vue'
|
||||
import WidgetSelectDropdown from '@/renderer/extensions/vueNodes/widgets/components/WidgetSelectDropdown.vue'
|
||||
import WidgetWithControl from '@/renderer/extensions/vueNodes/widgets/components/WidgetWithControl.vue'
|
||||
@@ -53,6 +55,8 @@ const comboSpec = computed<ComboInputSpec | undefined>(() => {
|
||||
return undefined
|
||||
})
|
||||
|
||||
const hasRemoteCombo = computed(() => !!comboSpec.value?.remote_combo)
|
||||
|
||||
const specDescriptor = computed<{
|
||||
kind: AssetKind
|
||||
allowUpload: boolean
|
||||
|
||||
@@ -33,6 +33,8 @@ interface Props {
|
||||
accept?: string
|
||||
filterOptions?: FilterOption[]
|
||||
sortOptions?: SortOption[]
|
||||
showSort?: boolean
|
||||
showLayoutSwitcher?: boolean
|
||||
showOwnershipFilter?: boolean
|
||||
ownershipOptions?: OwnershipFilterOption[]
|
||||
showBaseModelFilter?: boolean
|
||||
@@ -59,6 +61,8 @@ const {
|
||||
accept,
|
||||
filterOptions = [],
|
||||
sortOptions = getDefaultSortOptions(),
|
||||
showSort = true,
|
||||
showLayoutSwitcher = true,
|
||||
showOwnershipFilter,
|
||||
ownershipOptions,
|
||||
showBaseModelFilter,
|
||||
@@ -229,6 +233,8 @@ function handleSelection(item: FormDropdownItem, index: number) {
|
||||
v-model:base-model-selected="baseModelSelected"
|
||||
:filter-options
|
||||
:sort-options
|
||||
:show-sort
|
||||
:show-layout-switcher
|
||||
:show-ownership-filter
|
||||
:ownership-options
|
||||
:show-base-model-filter
|
||||
|
||||
@@ -68,7 +68,11 @@ const theButtonStyle = computed(() =>
|
||||
{{ placeholder }}
|
||||
</span>
|
||||
<span v-else>
|
||||
{{ selectedItems.map((item) => item.label ?? item.name).join(', ') }}
|
||||
{{
|
||||
selectedItems
|
||||
.map((item) => item.label || item.name || item.id)
|
||||
.join(', ')
|
||||
}}
|
||||
</span>
|
||||
</span>
|
||||
<i
|
||||
|
||||
@@ -20,6 +20,8 @@ interface Props {
|
||||
isSelected: (item: FormDropdownItem, index: number) => boolean
|
||||
filterOptions: FilterOption[]
|
||||
sortOptions: SortOption[]
|
||||
showSort?: boolean
|
||||
showLayoutSwitcher?: boolean
|
||||
showOwnershipFilter?: boolean
|
||||
ownershipOptions?: OwnershipFilterOption[]
|
||||
showBaseModelFilter?: boolean
|
||||
@@ -31,6 +33,8 @@ const {
|
||||
isSelected,
|
||||
filterOptions,
|
||||
sortOptions,
|
||||
showSort = true,
|
||||
showLayoutSwitcher = true,
|
||||
showOwnershipFilter,
|
||||
ownershipOptions,
|
||||
showBaseModelFilter,
|
||||
@@ -112,6 +116,8 @@ const virtualItems = computed<VirtualDropdownItem[]>(() =>
|
||||
v-model:ownership-selected="ownershipSelected"
|
||||
v-model:base-model-selected="baseModelSelected"
|
||||
:sort-options
|
||||
:show-sort
|
||||
:show-layout-switcher
|
||||
:show-ownership-filter
|
||||
:ownership-options
|
||||
:show-base-model-filter
|
||||
@@ -145,6 +151,7 @@ const virtualItems = computed<VirtualDropdownItem[]>(() =>
|
||||
:preview-url="item.preview_url ?? ''"
|
||||
:name="item.name"
|
||||
:label="item.label"
|
||||
:description="item.description"
|
||||
:layout="layoutMode"
|
||||
@click="emit('item-click', item, index)"
|
||||
/>
|
||||
|
||||
@@ -16,8 +16,10 @@ import type { LayoutMode, SortOption } from './types'
|
||||
|
||||
const { t } = useI18n()
|
||||
|
||||
defineProps<{
|
||||
const { showSort = true, showLayoutSwitcher = true } = defineProps<{
|
||||
sortOptions: SortOption[]
|
||||
showSort?: boolean
|
||||
showLayoutSwitcher?: boolean
|
||||
showOwnershipFilter?: boolean
|
||||
ownershipOptions?: OwnershipFilterOption[]
|
||||
showBaseModelFilter?: boolean
|
||||
@@ -112,6 +114,7 @@ function toggleBaseModelSelection(item: FilterOption) {
|
||||
/>
|
||||
|
||||
<Button
|
||||
v-if="showSort"
|
||||
ref="sortTriggerRef"
|
||||
:aria-label="t('assetBrowser.sortBy')"
|
||||
:title="t('assetBrowser.sortBy')"
|
||||
@@ -132,6 +135,7 @@ function toggleBaseModelSelection(item: FilterOption) {
|
||||
<i class="icon-[lucide--arrow-up-down] size-4" />
|
||||
</Button>
|
||||
<Popover
|
||||
v-if="showSort"
|
||||
ref="sortPopoverRef"
|
||||
:dismissable="true"
|
||||
:close-on-escape="true"
|
||||
@@ -306,6 +310,7 @@ function toggleBaseModelSelection(item: FilterOption) {
|
||||
</Popover>
|
||||
|
||||
<div
|
||||
v-if="showLayoutSwitcher"
|
||||
:class="
|
||||
cn(
|
||||
actionButtonStyle,
|
||||
|
||||
@@ -28,11 +28,15 @@ const assetKind = inject(AssetKindKey)
|
||||
|
||||
const isVideo = computed(() => assetKind?.value === 'video')
|
||||
const isMesh = computed(() => assetKind?.value === 'mesh')
|
||||
const isAudio = computed(() => assetKind?.value === 'audio')
|
||||
|
||||
const mediaContainerRef = ref<HTMLElement>()
|
||||
const resolvedMeshPreview = ref<string | null>(null)
|
||||
const meshPreviewAttempted = ref(false)
|
||||
|
||||
const audioRef = ref<HTMLAudioElement | null>(null)
|
||||
const isPlayingAudio = ref(false)
|
||||
|
||||
function toLookupName(name: string): string {
|
||||
const stripped = name.replace(/ \[output\]$/, '')
|
||||
const slash = stripped.lastIndexOf('/')
|
||||
@@ -68,6 +72,17 @@ function handleClick() {
|
||||
emit('click', props.index)
|
||||
}
|
||||
|
||||
function toggleAudioPreview(event: Event) {
|
||||
event.stopPropagation()
|
||||
const audio = audioRef.value
|
||||
if (!audio) return
|
||||
if (audio.paused) {
|
||||
void audio.play().catch(() => {})
|
||||
} else {
|
||||
audio.pause()
|
||||
}
|
||||
}
|
||||
|
||||
function handleImageLoad(event: Event) {
|
||||
emit('mediaLoad', event)
|
||||
if (!event.target || !(event.target instanceof HTMLImageElement)) return
|
||||
@@ -148,6 +163,35 @@ function handleVideoLoad(event: Event) {
|
||||
muted
|
||||
@loadeddata="handleVideoLoad"
|
||||
/>
|
||||
<button
|
||||
v-else-if="previewUrl && isAudio"
|
||||
type="button"
|
||||
:aria-label="
|
||||
isPlayingAudio
|
||||
? t('widgets.remoteCombo.pauseAudioPreview')
|
||||
: t('widgets.remoteCombo.playAudioPreview')
|
||||
"
|
||||
:aria-pressed="isPlayingAudio"
|
||||
class="flex size-full cursor-pointer items-center justify-center bg-component-node-widget-background hover:bg-component-node-widget-background-hovered"
|
||||
@click.stop="toggleAudioPreview"
|
||||
>
|
||||
<audio
|
||||
ref="audioRef"
|
||||
:src="previewUrl"
|
||||
preload="none"
|
||||
@play="isPlayingAudio = true"
|
||||
@pause="isPlayingAudio = false"
|
||||
@ended="isPlayingAudio = false"
|
||||
/>
|
||||
<i
|
||||
:class="
|
||||
cn(
|
||||
'text-secondary size-5',
|
||||
isPlayingAudio ? 'icon-[lucide--pause]' : 'icon-[lucide--play]'
|
||||
)
|
||||
"
|
||||
/>
|
||||
</button>
|
||||
<img
|
||||
v-else-if="displayedPreviewUrl"
|
||||
:src="displayedPreviewUrl"
|
||||
@@ -193,6 +237,13 @@ function handleVideoLoad(event: Event) {
|
||||
>
|
||||
{{ label ?? name }}
|
||||
</span>
|
||||
<!-- Description -->
|
||||
<span
|
||||
v-if="description && layout !== 'grid'"
|
||||
class="text-secondary line-clamp-1 block overflow-hidden text-xs"
|
||||
>
|
||||
{{ description }}
|
||||
</span>
|
||||
<!-- Meta Data -->
|
||||
<span v-if="actualDimensions" class="text-secondary block text-xs">
|
||||
{{ actualDimensions }}
|
||||
|
||||
@@ -12,7 +12,9 @@ export interface FormDropdownItem {
|
||||
name: string
|
||||
/** Original/alternate label (e.g., original filename) */
|
||||
label?: string
|
||||
/** Preview image/video URL */
|
||||
/** Short description shown below the name in list view */
|
||||
description?: string
|
||||
/** Preview image/video/audio URL */
|
||||
preview_url?: string
|
||||
/** Whether the item is immutable (public model) - used for ownership filtering */
|
||||
is_immutable?: boolean
|
||||
@@ -47,6 +49,7 @@ export interface FormDropdownMenuItemProps {
|
||||
previewUrl: string
|
||||
name: string
|
||||
label?: string
|
||||
description?: string
|
||||
layout?: LayoutMode
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,81 @@
|
||||
import axios from 'axios'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import { fetchRemoteRoute } from '@/renderer/extensions/vueNodes/widgets/utils/fetchRemoteRoute'
|
||||
import type { AuthHeader } from '@/types/authTypes'
|
||||
|
||||
const COMFY_API_BASE = 'https://api.example.test'

// Mutable auth fixture; vi.hoisted so the module-mock factory below can read
// it even though vi.mock calls are hoisted above this file's statements.
const mockAuth = vi.hoisted(() => ({
  authHeader: null as AuthHeader | null
}))

// Only axios.get is driven by these tests; everything else passes through.
vi.mock('axios', async (importOriginal) => {
  const actual = await importOriginal<typeof axios>()
  return {
    default: {
      ...actual,
      get: vi.fn()
    }
  }
})

// Pin the base URL so assertions on the resolved request URL are stable.
vi.mock('@/config/comfyApi', () => ({
  getComfyApiBaseUrl: () => COMFY_API_BASE
}))

// Auth store returns whatever header the current test placed in mockAuth.
vi.mock('@/stores/authStore', () => ({
  useAuthStore: vi.fn(() => ({
    getAuthHeader: vi.fn(() => Promise.resolve(mockAuth.authHeader))
  }))
}))
|
||||
|
||||
describe('fetchRemoteRoute', () => {
  beforeEach(() => {
    vi.mocked(axios.get).mockResolvedValue({ data: [] })
  })

  afterEach(() => {
    // Reset the shared auth fixture so tests stay order-independent.
    mockAuth.authHeader = null
    vi.clearAllMocks()
  })

  it('prepends the comfy api base URL to the route', async () => {
    await fetchRemoteRoute('/voices')
    const [url] = vi.mocked(axios.get).mock.calls[0]
    expect(url).toBe(`${COMFY_API_BASE}/voices`)
  })

  it('injects the auth header when one is available', async () => {
    mockAuth.authHeader = { Authorization: 'Bearer token-123' }
    await fetchRemoteRoute('/voices')
    const [, config] = vi.mocked(axios.get).mock.calls[0]
    expect(config?.headers).toEqual({ Authorization: 'Bearer token-123' })
  })

  it('does not set headers when no auth header is available', async () => {
    mockAuth.authHeader = null
    await fetchRemoteRoute('/voices')
    const [, config] = vi.mocked(axios.get).mock.calls[0]
    expect(config?.headers).toBeUndefined()
  })

  it('forwards params, timeout and signal to axios', async () => {
    const controller = new AbortController()
    await fetchRemoteRoute('/voices', {
      params: { filter: 'pro', limit: '10' },
      timeout: 5000,
      signal: controller.signal
    })
    const [, config] = vi.mocked(axios.get).mock.calls[0]
    expect(config?.params).toEqual({ filter: 'pro', limit: '10' })
    expect(config?.timeout).toBe(5000)
    expect(config?.signal).toBe(controller.signal)
  })

  it('returns the axios response', async () => {
    vi.mocked(axios.get).mockResolvedValueOnce({ data: { items: [1, 2] } })
    const res = await fetchRemoteRoute('/voices')
    expect(res.data).toEqual({ items: [1, 2] })
  })
})
|
||||
@@ -0,0 +1,41 @@
|
||||
import axios from 'axios'
|
||||
|
||||
import { getComfyApiBaseUrl } from '@/config/comfyApi'
|
||||
import { useAuthStore } from '@/stores/authStore'
|
||||
import type { AuthHeader } from '@/types/authTypes'
|
||||
|
||||
/**
|
||||
* Resolve a RemoteComboOptions route to a full URL. Routes are always relative
|
||||
* paths and are prepended with the comfy-api base URL.
|
||||
*/
|
||||
function resolveRoute(route: string): string {
|
||||
return getComfyApiBaseUrl() + route
|
||||
}
|
||||
|
||||
/**
|
||||
* Get auth headers for a remote request. Always injected — comfy-api requires it.
|
||||
*/
|
||||
async function getRemoteAuthHeaders(): Promise<{ headers?: AuthHeader }> {
|
||||
const authStore = useAuthStore()
|
||||
const authHeader = await authStore.getAuthHeader()
|
||||
if (authHeader) {
|
||||
return { headers: authHeader }
|
||||
}
|
||||
return {}
|
||||
}
|
||||
|
||||
/**
|
||||
* Convenience: make an authenticated GET request to a remote route.
|
||||
*/
|
||||
export async function fetchRemoteRoute(
|
||||
route: string,
|
||||
options: {
|
||||
params?: Record<string, string>
|
||||
timeout?: number
|
||||
signal?: AbortSignal
|
||||
} = {}
|
||||
) {
|
||||
const url = resolveRoute(route)
|
||||
const authHeaders = await getRemoteAuthHeaders()
|
||||
return axios.get(url, { ...options, ...authHeaders })
|
||||
}
|
||||
@@ -0,0 +1,254 @@
|
||||
import { describe, expect, it } from 'vitest'
|
||||
|
||||
import {
|
||||
buildSearchText,
|
||||
extractItems,
|
||||
getByPath,
|
||||
mapToDropdownItem,
|
||||
resolveLabel
|
||||
} from '@/renderer/extensions/vueNodes/widgets/utils/itemSchemaUtils'
|
||||
|
||||
describe('getByPath', () => {
|
||||
it('returns a top-level value for a plain key', () => {
|
||||
expect(getByPath({ name: 'Alice' }, 'name')).toBe('Alice')
|
||||
})
|
||||
|
||||
it('traverses nested objects via dot-path', () => {
|
||||
expect(getByPath({ profile: { name: 'Alice' } }, 'profile.name')).toBe(
|
||||
'Alice'
|
||||
)
|
||||
})
|
||||
|
||||
it('treats numeric segments as array indices', () => {
|
||||
expect(getByPath({ items: ['a', 'b', 'c'] }, 'items.1')).toBe('b')
|
||||
})
|
||||
|
||||
it('combines nested objects and array indices', () => {
|
||||
const obj = { data: { results: [{ id: 'x' }, { id: 'y' }] } }
|
||||
expect(getByPath(obj, 'data.results.1.id')).toBe('y')
|
||||
})
|
||||
|
||||
it('returns undefined for a missing top-level key', () => {
|
||||
expect(getByPath({ a: 1 }, 'b')).toBeUndefined()
|
||||
})
|
||||
|
||||
it('returns undefined when traversing past a null segment', () => {
|
||||
expect(getByPath({ a: null }, 'a.b')).toBeUndefined()
|
||||
})
|
||||
|
||||
it('returns undefined when the root is null', () => {
|
||||
expect(getByPath(null, 'a')).toBeUndefined()
|
||||
})
|
||||
|
||||
it('returns undefined when the root is undefined', () => {
|
||||
expect(getByPath(undefined, 'a')).toBeUndefined()
|
||||
})
|
||||
|
||||
it('returns undefined for an out-of-bounds array index', () => {
|
||||
expect(getByPath({ items: ['a'] }, 'items.5')).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe('resolveLabel', () => {
|
||||
it('resolves a plain dot-path to its value', () => {
|
||||
expect(resolveLabel('name', { name: 'Alice' })).toBe('Alice')
|
||||
})
|
||||
|
||||
it('resolves a nested dot-path without placeholders', () => {
|
||||
expect(resolveLabel('profile.name', { profile: { name: 'Alice' } })).toBe(
|
||||
'Alice'
|
||||
)
|
||||
})
|
||||
|
||||
it('substitutes a single {field} placeholder', () => {
|
||||
expect(resolveLabel('Name: {name}', { name: 'Alice' })).toBe('Name: Alice')
|
||||
})
|
||||
|
||||
it('substitutes multiple placeholders', () => {
|
||||
expect(
|
||||
resolveLabel('{first} {last}', { first: 'Alice', last: 'Liddell' })
|
||||
).toBe('Alice Liddell')
|
||||
})
|
||||
|
||||
it('substitutes placeholders with dot-paths', () => {
|
||||
expect(
|
||||
resolveLabel('{profile.name} ({profile.age})', {
|
||||
profile: { name: 'Alice', age: 30 }
|
||||
})
|
||||
).toBe('Alice (30)')
|
||||
})
|
||||
|
||||
it('replaces missing placeholder fields with an empty string', () => {
|
||||
expect(resolveLabel('{name} - {missing}', { name: 'Alice' })).toBe(
|
||||
'Alice - '
|
||||
)
|
||||
})
|
||||
|
||||
it('returns an empty string when a plain path resolves to undefined', () => {
|
||||
expect(resolveLabel('missing', { a: 1 })).toBe('')
|
||||
})
|
||||
|
||||
it('coerces numeric values to strings', () => {
|
||||
expect(resolveLabel('{count}', { count: 5 })).toBe('5')
|
||||
})
|
||||
})
|
||||
|
||||
describe('mapToDropdownItem', () => {
|
||||
it('maps required fields to id and name', () => {
|
||||
const item = mapToDropdownItem(
|
||||
{ voice_id: 'v1', label: 'Roger' },
|
||||
{ value_field: 'voice_id', label_field: 'label', preview_type: 'image' }
|
||||
)
|
||||
|
||||
expect(item).toEqual({
|
||||
id: 'v1',
|
||||
name: 'Roger',
|
||||
description: undefined,
|
||||
preview_url: undefined
|
||||
})
|
||||
})
|
||||
|
||||
it('includes description when description_field is set', () => {
|
||||
const item = mapToDropdownItem(
|
||||
{ id: 'v1', label: 'Roger', desc: 'Laid-back American male' },
|
||||
{
|
||||
value_field: 'id',
|
||||
label_field: 'label',
|
||||
description_field: 'desc',
|
||||
preview_type: 'image'
|
||||
}
|
||||
)
|
||||
|
||||
expect(item.description).toBe('Laid-back American male')
|
||||
})
|
||||
|
||||
it('includes preview_url when preview_url_field is set', () => {
|
||||
const item = mapToDropdownItem(
|
||||
{ id: 'v1', label: 'Roger', sample: 'https://example.com/a.mp3' },
|
||||
{
|
||||
value_field: 'id',
|
||||
label_field: 'label',
|
||||
preview_url_field: 'sample',
|
||||
preview_type: 'audio'
|
||||
}
|
||||
)
|
||||
|
||||
expect(item.preview_url).toBe('https://example.com/a.mp3')
|
||||
})
|
||||
|
||||
it('resolves label_field templates with placeholders', () => {
|
||||
const item = mapToDropdownItem(
|
||||
{ id: 'v1', first: 'Alice', last: 'Liddell' },
|
||||
{
|
||||
value_field: 'id',
|
||||
label_field: '{first} {last}',
|
||||
preview_type: 'image'
|
||||
}
|
||||
)
|
||||
|
||||
expect(item.name).toBe('Alice Liddell')
|
||||
})
|
||||
|
||||
it('resolves dot-path fields for nested data', () => {
|
||||
const item = mapToDropdownItem(
|
||||
{ task_result: { elements: [{ element_id: 'e1', name: 'Elem' }] } },
|
||||
{
|
||||
value_field: 'task_result.elements.0.element_id',
|
||||
label_field: 'task_result.elements.0.name',
|
||||
preview_type: 'image'
|
||||
}
|
||||
)
|
||||
|
||||
expect(item.id).toBe('e1')
|
||||
expect(item.name).toBe('Elem')
|
||||
})
|
||||
|
||||
it('stringifies non-string value_field', () => {
|
||||
const item = mapToDropdownItem(
|
||||
{ id: 42, label: 'Answer' },
|
||||
{ value_field: 'id', label_field: 'label', preview_type: 'image' }
|
||||
)
|
||||
|
||||
expect(item.id).toBe('42')
|
||||
})
|
||||
|
||||
it('returns an empty string id when value_field is missing', () => {
|
||||
const item = mapToDropdownItem(
|
||||
{ label: 'Orphan' },
|
||||
{ value_field: 'id', label_field: 'label', preview_type: 'image' }
|
||||
)
|
||||
|
||||
expect(item.id).toBe('')
|
||||
})
|
||||
})
|
||||
|
||||
describe('extractItems', () => {
|
||||
it('returns the full response when responseKey is undefined', () => {
|
||||
expect(extractItems([1, 2, 3])).toEqual([1, 2, 3])
|
||||
})
|
||||
|
||||
it('extracts items from a top-level key', () => {
|
||||
expect(
|
||||
extractItems({ voices: [{ id: 'a' }, { id: 'b' }] }, 'voices')
|
||||
).toEqual([{ id: 'a' }, { id: 'b' }])
|
||||
})
|
||||
|
||||
it('extracts items via a dot-path', () => {
|
||||
expect(extractItems({ data: { items: [1, 2] } }, 'data.items')).toEqual([
|
||||
1, 2
|
||||
])
|
||||
})
|
||||
|
||||
it('returns an empty array for a valid empty list', () => {
|
||||
expect(extractItems([])).toEqual([])
|
||||
})
|
||||
|
||||
it('returns null when the path does not exist', () => {
|
||||
expect(extractItems({ a: 1 }, 'nonexistent')).toBeNull()
|
||||
})
|
||||
|
||||
it('returns null when the path resolves to a non-array', () => {
|
||||
expect(
|
||||
extractItems({ data: { items: 'not an array' } }, 'data.items')
|
||||
).toBeNull()
|
||||
})
|
||||
|
||||
it('returns null when the full response is not an array', () => {
|
||||
expect(extractItems({ not: 'array' })).toBeNull()
|
||||
})
|
||||
|
||||
it('returns null when response is null', () => {
|
||||
expect(extractItems(null)).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
describe('buildSearchText', () => {
|
||||
it('joins multiple fields with a space', () => {
|
||||
expect(buildSearchText({ a: 'Hello', b: 'World' }, ['a', 'b'])).toBe(
|
||||
'hello world'
|
||||
)
|
||||
})
|
||||
|
||||
it('lowercases the result', () => {
|
||||
expect(buildSearchText({ name: 'ALICE' }, ['name'])).toBe('alice')
|
||||
})
|
||||
|
||||
it('drops missing fields', () => {
|
||||
expect(buildSearchText({ name: 'Alice' }, ['name', 'missing'])).toBe(
|
||||
'alice'
|
||||
)
|
||||
})
|
||||
|
||||
it('supports dot-path fields', () => {
|
||||
expect(
|
||||
buildSearchText({ profile: { name: 'Alice', age: 30 } }, [
|
||||
'profile.name',
|
||||
'profile.age'
|
||||
])
|
||||
).toBe('alice 30')
|
||||
})
|
||||
|
||||
it('returns an empty string when all fields are missing', () => {
|
||||
expect(buildSearchText({ name: 'Alice' }, ['missing'])).toBe('')
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,62 @@
|
||||
import type { RemoteItemSchema } from '@/schemas/nodeDefSchema'
|
||||
import type { FormDropdownItem } from '@/renderer/extensions/vueNodes/widgets/components/form/dropdown/types'
|
||||
|
||||
/** Traverse an object by dot-path, treating numeric segments as array indices */
|
||||
export function getByPath(obj: unknown, path: string): unknown {
|
||||
return path.split('.').reduce((acc: unknown, key: string) => {
|
||||
if (acc == null) return undefined
|
||||
const idx = Number(key)
|
||||
if (Number.isInteger(idx) && idx >= 0 && Array.isArray(acc)) return acc[idx]
|
||||
return (acc as Record<string, unknown>)[key]
|
||||
}, obj)
|
||||
}
|
||||
|
||||
/** Resolve a label — either dot-path or template with {field.path} placeholders */
|
||||
export function resolveLabel(template: string, item: unknown): string {
|
||||
if (!template.includes('{')) {
|
||||
return String(getByPath(item, template) ?? '')
|
||||
}
|
||||
return template.replace(/\{([^}]+)\}/g, (_, path: string) =>
|
||||
String(getByPath(item, path) ?? '')
|
||||
)
|
||||
}
|
||||
|
||||
/** Map a raw API object to a FormDropdownItem using the item_schema */
|
||||
export function mapToDropdownItem(
|
||||
raw: unknown,
|
||||
schema: RemoteItemSchema
|
||||
): FormDropdownItem {
|
||||
return {
|
||||
id: String(getByPath(raw, schema.value_field) ?? ''),
|
||||
name: resolveLabel(schema.label_field, raw),
|
||||
description: schema.description_field
|
||||
? resolveLabel(schema.description_field, raw)
|
||||
: undefined,
|
||||
preview_url: schema.preview_url_field
|
||||
? String(getByPath(raw, schema.preview_url_field) ?? '')
|
||||
: undefined
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract items array from a full API response using `responseKey`.
|
||||
* Returns `null` when the resolved value isn't an array (path missing,
|
||||
* wrong shape, etc.) so callers can distinguish a malformed response
|
||||
* from a legitimate empty list.
|
||||
*/
|
||||
export function extractItems(
|
||||
response: unknown,
|
||||
responseKey?: string
|
||||
): unknown[] | null {
|
||||
const data = responseKey ? getByPath(response, responseKey) : response
|
||||
return Array.isArray(data) ? data : null
|
||||
}
|
||||
|
||||
/** Build search text for an item from the specified search fields */
|
||||
export function buildSearchText(raw: unknown, searchFields: string[]): string {
|
||||
return searchFields
|
||||
.map((field) => String(getByPath(raw, field) ?? ''))
|
||||
.filter(Boolean)
|
||||
.join(' ')
|
||||
.toLowerCase()
|
||||
}
|
||||
@@ -0,0 +1,249 @@
|
||||
import { AxiosError, AxiosHeaders } from 'axios'
|
||||
import { describe, expect, it } from 'vitest'
|
||||
|
||||
import type { RemoteComboConfig } from '@/schemas/nodeDefSchema'
|
||||
|
||||
import {
|
||||
buildCacheKey,
|
||||
getBackoff,
|
||||
isRetriableError,
|
||||
summarizeError,
|
||||
summarizePayload
|
||||
} from '@/renderer/extensions/vueNodes/widgets/utils/richComboHelpers'
|
||||
|
||||
const baseConfig: RemoteComboConfig = {
|
||||
route: '/voices',
|
||||
item_schema: {
|
||||
value_field: 'id',
|
||||
label_field: 'name',
|
||||
preview_type: 'image'
|
||||
}
|
||||
}
|
||||
|
||||
function parseKey(key: string): URLSearchParams {
|
||||
return new URL(key).searchParams
|
||||
}
|
||||
|
||||
describe('buildCacheKey', () => {
|
||||
it('encodes the route and response_key', () => {
|
||||
const params = parseKey(
|
||||
buildCacheKey(
|
||||
{
|
||||
...baseConfig,
|
||||
route: '/voices',
|
||||
response_key: 'data.items'
|
||||
},
|
||||
'fb:user-a'
|
||||
)
|
||||
)
|
||||
expect(params.get('route')).toBe('/voices')
|
||||
expect(params.get('responseKey')).toBe('data.items')
|
||||
})
|
||||
|
||||
it('partitions by authScope', () => {
|
||||
const a = buildCacheKey(baseConfig, 'ws:team-a')
|
||||
const b = buildCacheKey(baseConfig, 'ws:team-b')
|
||||
expect(a).not.toBe(b)
|
||||
expect(parseKey(a).get('u')).toBe('ws:team-a')
|
||||
expect(parseKey(b).get('u')).toBe('ws:team-b')
|
||||
})
|
||||
|
||||
it('treats workspace, firebase, and api-key scopes as distinct buckets', () => {
|
||||
const ws = buildCacheKey(baseConfig, 'ws:abc')
|
||||
const fb = buildCacheKey(baseConfig, 'fb:abc')
|
||||
const apikey = buildCacheKey(baseConfig, 'apikey')
|
||||
expect(new Set([ws, fb, apikey]).size).toBe(3)
|
||||
})
|
||||
|
||||
it('falls back to "anon" when authScope is missing', () => {
|
||||
expect(parseKey(buildCacheKey(baseConfig, null)).get('u')).toBe('anon')
|
||||
expect(parseKey(buildCacheKey(baseConfig, undefined)).get('u')).toBe('anon')
|
||||
})
|
||||
|
||||
it('treats missing optional fields as empty so the key stays stable', () => {
|
||||
const params = parseKey(buildCacheKey(baseConfig, 'fb:user-a'))
|
||||
expect(params.get('responseKey')).toBe('')
|
||||
})
|
||||
})
|
||||
|
||||
describe('getBackoff', () => {
|
||||
it('grows exponentially from 1s', () => {
|
||||
expect(getBackoff(1)).toBe(2000)
|
||||
expect(getBackoff(2)).toBe(4000)
|
||||
expect(getBackoff(3)).toBe(8000)
|
||||
expect(getBackoff(4)).toBe(16000)
|
||||
})
|
||||
|
||||
it('caps at 16s for higher attempt counts', () => {
|
||||
expect(getBackoff(5)).toBe(16000)
|
||||
expect(getBackoff(10)).toBe(16000)
|
||||
expect(getBackoff(100)).toBe(16000)
|
||||
})
|
||||
})
|
||||
|
||||
describe('isRetriableError', () => {
|
||||
function axiosErrorWithStatus(status: number): AxiosError {
|
||||
return new AxiosError(
|
||||
`HTTP ${status}`,
|
||||
'ERR_BAD_RESPONSE',
|
||||
undefined,
|
||||
undefined,
|
||||
{
|
||||
status,
|
||||
statusText: '',
|
||||
headers: {},
|
||||
config: { headers: new AxiosHeaders() },
|
||||
data: null
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
it('retries non-axios errors (e.g. unexpected throws)', () => {
|
||||
expect(isRetriableError(new Error('boom'))).toBe(true)
|
||||
expect(isRetriableError('string error')).toBe(true)
|
||||
expect(isRetriableError(undefined)).toBe(true)
|
||||
})
|
||||
|
||||
it('retries axios errors with no response (network failures)', () => {
|
||||
const err = new AxiosError('Network Error', 'ERR_NETWORK')
|
||||
expect(isRetriableError(err)).toBe(true)
|
||||
})
|
||||
|
||||
it('retries 5xx responses', () => {
|
||||
expect(isRetriableError(axiosErrorWithStatus(500))).toBe(true)
|
||||
expect(isRetriableError(axiosErrorWithStatus(502))).toBe(true)
|
||||
expect(isRetriableError(axiosErrorWithStatus(503))).toBe(true)
|
||||
})
|
||||
|
||||
it('retries 408 (request timeout) and 429 (too many requests)', () => {
|
||||
expect(isRetriableError(axiosErrorWithStatus(408))).toBe(true)
|
||||
expect(isRetriableError(axiosErrorWithStatus(429))).toBe(true)
|
||||
})
|
||||
|
||||
it('does not retry other 4xx responses', () => {
|
||||
expect(isRetriableError(axiosErrorWithStatus(400))).toBe(false)
|
||||
expect(isRetriableError(axiosErrorWithStatus(401))).toBe(false)
|
||||
expect(isRetriableError(axiosErrorWithStatus(403))).toBe(false)
|
||||
expect(isRetriableError(axiosErrorWithStatus(404))).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('summarizeError', () => {
|
||||
it('extracts message, code and status from an axios error', () => {
|
||||
const err = new AxiosError(
|
||||
'Request failed',
|
||||
'ERR_BAD_RESPONSE',
|
||||
undefined,
|
||||
undefined,
|
||||
{
|
||||
status: 500,
|
||||
statusText: '',
|
||||
headers: {},
|
||||
config: { headers: new AxiosHeaders() },
|
||||
data: null
|
||||
}
|
||||
)
|
||||
expect(summarizeError(err)).toEqual({
|
||||
message: 'Request failed',
|
||||
code: 'ERR_BAD_RESPONSE',
|
||||
status: 500
|
||||
})
|
||||
})
|
||||
|
||||
it('does not include axios config, headers, request or response data', () => {
|
||||
const authedConfig = {
|
||||
url: '/voices',
|
||||
method: 'get',
|
||||
headers: new AxiosHeaders({ Authorization: 'Bearer SECRET-TOKEN-123' })
|
||||
}
|
||||
const err = new AxiosError(
|
||||
'Request failed',
|
||||
'ERR_BAD_RESPONSE',
|
||||
authedConfig,
|
||||
undefined,
|
||||
{
|
||||
status: 500,
|
||||
statusText: '',
|
||||
headers: { 'set-cookie': ['session=PRIVATE'] },
|
||||
config: authedConfig,
|
||||
data: { user_email: 'private@example.com' }
|
||||
}
|
||||
)
|
||||
const summary = summarizeError(err)
|
||||
|
||||
expect(JSON.stringify(summary)).not.toContain('SECRET-TOKEN-123')
|
||||
expect(JSON.stringify(summary)).not.toContain('PRIVATE')
|
||||
expect(JSON.stringify(summary)).not.toContain('private@example.com')
|
||||
expect(summary).not.toHaveProperty('config')
|
||||
expect(summary).not.toHaveProperty('request')
|
||||
expect(summary).not.toHaveProperty('response')
|
||||
})
|
||||
|
||||
it('reports an axios network error with no response as undefined status', () => {
|
||||
const err = new AxiosError('Network Error', 'ERR_NETWORK')
|
||||
expect(summarizeError(err)).toEqual({
|
||||
message: 'Network Error',
|
||||
code: 'ERR_NETWORK',
|
||||
status: undefined
|
||||
})
|
||||
})
|
||||
|
||||
it('summarizes a plain Error using its name and message', () => {
|
||||
expect(summarizeError(new TypeError('boom'))).toEqual({
|
||||
message: 'boom',
|
||||
name: 'TypeError'
|
||||
})
|
||||
})
|
||||
|
||||
it('coerces non-Error throwables to a message string', () => {
|
||||
expect(summarizeError('oops')).toEqual({ message: 'oops' })
|
||||
expect(summarizeError(42)).toEqual({ message: '42' })
|
||||
expect(summarizeError(null)).toEqual({ message: 'null' })
|
||||
expect(summarizeError(undefined)).toEqual({ message: 'undefined' })
|
||||
})
|
||||
})
|
||||
|
||||
describe('summarizePayload', () => {
|
||||
it('reports array length without exposing values', () => {
|
||||
expect(
|
||||
summarizePayload([{ secret: 'a' }, { secret: 'b' }, { secret: 'c' }])
|
||||
).toEqual({
|
||||
type: 'array',
|
||||
length: 3
|
||||
})
|
||||
})
|
||||
|
||||
it('reports object keys without exposing values', () => {
|
||||
expect(
|
||||
summarizePayload({ user_email: 'private@example.com', voices: ['x'] })
|
||||
).toEqual({
|
||||
type: 'object',
|
||||
keys: ['user_email', 'voices'],
|
||||
keyCount: 2
|
||||
})
|
||||
})
|
||||
|
||||
it('caps the keys sample at 10 but reports the full key count', () => {
|
||||
const big: Record<string, number> = {}
|
||||
for (let i = 0; i < 25; i++) big[`k${i}`] = i
|
||||
const summary = summarizePayload(big) as {
|
||||
type: string
|
||||
keys: string[]
|
||||
keyCount: number
|
||||
}
|
||||
expect(summary.type).toBe('object')
|
||||
expect(summary.keys).toHaveLength(10)
|
||||
expect(summary.keyCount).toBe(25)
|
||||
})
|
||||
|
||||
it('distinguishes null and undefined', () => {
|
||||
expect(summarizePayload(null)).toEqual({ type: 'null' })
|
||||
expect(summarizePayload(undefined)).toEqual({ type: 'undefined' })
|
||||
})
|
||||
|
||||
it('reports primitive types without their value', () => {
|
||||
expect(summarizePayload('hello')).toEqual({ type: 'string' })
|
||||
expect(summarizePayload(123)).toEqual({ type: 'number' })
|
||||
expect(summarizePayload(true)).toEqual({ type: 'boolean' })
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,91 @@
|
||||
import axios from 'axios'
|
||||
|
||||
import type { RemoteComboConfig } from '@/schemas/nodeDefSchema'
|
||||
|
||||
const BACKOFF_BASE_MS = 1000
|
||||
const BACKOFF_CAP_MS = 16000
|
||||
|
||||
/**
|
||||
* Build a stable cache key for a remote combo configuration.
|
||||
*
|
||||
* All remote-combo routes go through comfy-api with auth, so the cache is
|
||||
* always partitioned by `authScope` — an opaque, non-secret identifier of
|
||||
* the active auth context (workspace id, firebase uid, etc.). Resolving the
|
||||
* scope is the caller's responsibility, which keeps this helper pure and
|
||||
* trivially testable.
|
||||
*/
|
||||
export function buildCacheKey(
|
||||
config: RemoteComboConfig,
|
||||
authScope: string | null | undefined
|
||||
): string {
|
||||
const params = new URLSearchParams({
|
||||
route: config.route,
|
||||
responseKey: config.response_key ?? '',
|
||||
u: authScope ?? 'anon'
|
||||
})
|
||||
return `https://cache.comfy.invalid/?${params}`
|
||||
}
|
||||
|
||||
/**
|
||||
* Exponential backoff in milliseconds, capped at 16s. `count` is the
|
||||
* number of failed attempts so far (1-indexed for the first retry).
|
||||
*/
|
||||
export function getBackoff(count: number): number {
|
||||
return Math.min(BACKOFF_BASE_MS * Math.pow(2, count), BACKOFF_CAP_MS)
|
||||
}
|
||||
|
||||
/**
|
||||
* Distinguish transient errors (worth retrying) from permanent ones.
|
||||
* 401/403/404 etc. won't fix themselves — retrying wastes time.
|
||||
* Network-level failures (no response) are treated as retriable.
|
||||
*/
|
||||
export function isRetriableError(err: unknown): boolean {
|
||||
if (!axios.isAxiosError(err)) return true
|
||||
const status = err.response?.status
|
||||
if (status == null) return true
|
||||
if (status >= 500) return true
|
||||
return status === 408 || status === 429
|
||||
}
|
||||
|
||||
/**
|
||||
* Build a console-safe summary of an unknown error. Authenticated remote
|
||||
* routes inject auth headers via fetchRemoteRoute, and AxiosError serializes
|
||||
* its `config` (including those headers) by default — so logging the raw
|
||||
* error would leak bearer tokens to devtools and any attached telemetry.
|
||||
* This summary keeps only the diagnostic essentials.
|
||||
*/
|
||||
export function summarizeError(err: unknown): Record<string, unknown> {
|
||||
if (axios.isAxiosError(err)) {
|
||||
return {
|
||||
message: err.message,
|
||||
code: err.code,
|
||||
status: err.response?.status
|
||||
}
|
||||
}
|
||||
if (err instanceof Error) {
|
||||
return { message: err.message, name: err.name }
|
||||
}
|
||||
return { message: String(err) }
|
||||
}
|
||||
|
||||
const PAYLOAD_KEY_SAMPLE = 10
|
||||
|
||||
/**
|
||||
* Build a console-safe summary of a remote response payload. Logs the
|
||||
* structural shape so devs can diagnose schema mismatches without the
|
||||
* actual values, which for authenticated routes may contain private data.
|
||||
*/
|
||||
export function summarizePayload(data: unknown): Record<string, unknown> {
|
||||
if (data === null) return { type: 'null' }
|
||||
if (data === undefined) return { type: 'undefined' }
|
||||
if (Array.isArray(data)) return { type: 'array', length: data.length }
|
||||
if (typeof data === 'object') {
|
||||
const keys = Object.keys(data as Record<string, unknown>)
|
||||
return {
|
||||
type: 'object',
|
||||
keys: keys.slice(0, PAYLOAD_KEY_SAMPLE),
|
||||
keyCount: keys.length
|
||||
}
|
||||
}
|
||||
return { type: typeof data }
|
||||
}
|
||||
@@ -5,6 +5,11 @@ import { resultItemType } from '@/schemas/apiSchema'
|
||||
import { CONTROL_OPTIONS } from '@/types/simplifiedWidget'
|
||||
|
||||
// A combo option value is either a display string or a number.
const zComboOption = z.union([z.string(), z.number()])
|
||||
|
||||
/**
|
||||
* Plain remote combo config — feeds a standard combo dropdown from a remote endpoint.
|
||||
* Handled by `useRemoteWidget` + `WidgetSelectDropdown`.
|
||||
*/
|
||||
const zRemoteWidgetConfig = z.object({
|
||||
route: z.string().url().or(z.string().startsWith('/')),
|
||||
refresh: z.number().gte(128).safe().or(z.number().lte(0).safe()).optional(),
|
||||
@@ -15,6 +20,32 @@ const zRemoteWidgetConfig = z.object({
|
||||
timeout: z.number().gte(0).optional(),
|
||||
max_retries: z.number().gte(0).optional()
|
||||
})
|
||||
|
||||
/**
 * Field mapping that turns one raw remote API item into a dropdown entry.
 * Fields are dot-paths into the raw object; label/description fields also
 * accept "{field.path}" templates (see itemSchemaUtils.resolveLabel).
 */
const zRemoteItemSchema = z.object({
  // Dot-path yielding the option's unique value (becomes the item id).
  value_field: z.string(),
  // Dot-path or placeholder template rendered as the option's label.
  label_field: z.string(),
  // Optional dot-path to a preview asset URL for the option.
  preview_url_field: z.string().optional(),
  // How the preview asset is rendered; defaults to an image preview.
  preview_type: z.enum(['image', 'video', 'audio']).default('image'),
  // Optional dot-path or template for a secondary description line.
  description_field: z.string().optional(),
  // Dot-paths whose values are joined into the item's search text.
  search_fields: z.array(z.string()).optional()
})
||||
|
||||
/**
 * Rich remote combo config — feeds `RichComboWidget` with item previews, search, and filtering.
 * Requires `item_schema`. Vue-nodes only. Routes are always relative paths and resolve against
 * the comfy-api base URL with auth headers injected. The endpoint returns the full items array
 * in a single response.
 */
const zRemoteComboConfig = z.object({
  // Relative route only (must start with '/'); absolute URLs are rejected.
  route: z.string().startsWith('/'),
  // Field mapping from raw API items to dropdown entries.
  item_schema: zRemoteItemSchema,
  // presumably shows a manual refresh control on the widget — confirm in RichComboWidget
  refresh_button: z.boolean().optional(),
  // presumably auto-selects an entry after the list loads — confirm in the widget
  auto_select: z.enum(['first', 'last']).optional(),
  // Refresh interval: >= 128 to enable, <= 0 to disable; 1-127 is rejected.
  // NOTE(review): units look like ms with a sanity floor — confirm against useRemoteWidget.
  refresh: z.number().gte(128).safe().or(z.number().lte(0).safe()).optional(),
  // Dot-path to the items array inside the response payload.
  response_key: z.string().optional(),
  // Request timeout, forwarded to the remote fetch (axios `timeout`).
  timeout: z.number().gte(0).optional(),
  // presumably caps retry attempts for transient failures — confirm in caller
  max_retries: z.number().gte(0).optional()
})
||||
const zMultiSelectOption = z.object({
|
||||
placeholder: z.string().optional(),
|
||||
chip: z.boolean().optional()
|
||||
@@ -96,6 +127,7 @@ export const zComboInputOptions = zBaseInputOptions.extend({
|
||||
animated_image_upload: z.boolean().optional(),
|
||||
options: z.array(zComboOption).optional(),
|
||||
remote: zRemoteWidgetConfig.optional(),
|
||||
remote_combo: zRemoteComboConfig.optional(),
|
||||
/** Whether the widget is a multi-select widget. */
|
||||
multi_select: zMultiSelectOption.optional()
|
||||
})
|
||||
@@ -352,7 +384,9 @@ export const zMatchTypeOptions = z.object({
|
||||
export type ComfyInputsSpec = z.infer<typeof zComfyInputsSpec>
|
||||
export type ComfyOutputTypesSpec = z.infer<typeof zComfyOutputTypesSpec>
|
||||
export type ComfyNodeDef = z.infer<typeof zComfyNodeDef>
|
||||
export type RemoteItemSchema = z.infer<typeof zRemoteItemSchema>
|
||||
export type RemoteWidgetConfig = z.infer<typeof zRemoteWidgetConfig>
|
||||
export type RemoteComboConfig = z.infer<typeof zRemoteComboConfig>
|
||||
|
||||
export type ComboInputOptions = z.infer<typeof zComboInputOptions>
|
||||
export type NumericInputOptions = z.infer<typeof zNumericInputOptions>
|
||||
|
||||
@@ -71,4 +71,66 @@ describe('validateNodeDef', () => {
|
||||
})
|
||||
}
|
||||
)
|
||||
|
||||
describe('remote_combo route validation', () => {
|
||||
const buildNodeDef = (remoteCombo: object): unknown => ({
|
||||
...EXAMPLE_NODE_DEF,
|
||||
input: {
|
||||
required: {
|
||||
voice: ['COMBO', { remote_combo: remoteCombo }]
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const baseRemoteCombo = {
|
||||
item_schema: { value_field: 'id', label_field: 'name' }
|
||||
}
|
||||
|
||||
it('accepts a relative route', () => {
|
||||
expect(
|
||||
validateComfyNodeDef(
|
||||
buildNodeDef({
|
||||
...baseRemoteCombo,
|
||||
route: '/voices'
|
||||
})
|
||||
)
|
||||
).not.toBeNull()
|
||||
})
|
||||
|
||||
it('rejects an absolute http URL', () => {
|
||||
expect(
|
||||
validateComfyNodeDef(
|
||||
buildNodeDef({
|
||||
...baseRemoteCombo,
|
||||
route: 'http://api.example.com/voices'
|
||||
}),
|
||||
() => {}
|
||||
)
|
||||
).toBeNull()
|
||||
})
|
||||
|
||||
it('rejects an absolute https URL', () => {
|
||||
expect(
|
||||
validateComfyNodeDef(
|
||||
buildNodeDef({
|
||||
...baseRemoteCombo,
|
||||
route: 'https://api.example.com/voices'
|
||||
}),
|
||||
() => {}
|
||||
)
|
||||
).toBeNull()
|
||||
})
|
||||
|
||||
it('rejects a route with no leading slash', () => {
|
||||
expect(
|
||||
validateComfyNodeDef(
|
||||
buildNodeDef({
|
||||
...baseRemoteCombo,
|
||||
route: 'voices'
|
||||
}),
|
||||
() => {}
|
||||
)
|
||||
).toBeNull()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -2,10 +2,6 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import type { ComfyWorkflowJSON } from '@/platform/workflow/validation/schemas/workflowSchema'
|
||||
|
||||
vi.mock('@/base/assert', () => ({
|
||||
assert: vi.fn()
|
||||
}))
|
||||
|
||||
const mockNodeOutputStore = vi.hoisted(() => ({
|
||||
snapshotOutputs: vi.fn(() => ({})),
|
||||
restoreOutputs: vi.fn()
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import * as Sentry from '@sentry/vue'
|
||||
import _ from 'es-toolkit/compat'
|
||||
|
||||
import { assert } from '@/base/assert'
|
||||
import type { CanvasPointerEvent } from '@/lib/litegraph/src/litegraph'
|
||||
import { LGraphCanvas, LiteGraph } from '@/lib/litegraph/src/litegraph'
|
||||
import { isDesktop } from '@/platform/distribution/types'
|
||||
import type { ComfyWorkflow } from '@/platform/workflow/management/stores/workflowStore'
|
||||
import { useWorkflowStore } from '@/platform/workflow/management/stores/workflowStore'
|
||||
import type { ComfyWorkflowJSON } from '@/platform/workflow/validation/schemas/workflowSchema'
|
||||
@@ -26,17 +27,28 @@ function isActiveTracker(tracker: ChangeTracker): boolean {
|
||||
const reportedInactiveCalls = new Set<string>()
|
||||
|
||||
/**
|
||||
* Report a ChangeTracker method being called on an inactive tracker.
|
||||
* Deduplicates per method+workflow per session to avoid signal noise on hot paths.
|
||||
* Report a ChangeTracker method being called on an inactive tracker —
|
||||
* a lifecycle violation that usually indicates stale extension state or
|
||||
* an incorrect call ordering. Reports once per method per workflow per
|
||||
* session so the signal is not drowned out by hot-path invocations while
|
||||
* still distinguishing between workflows.
|
||||
*/
|
||||
function reportInactiveTrackerCall(method: string, workflowPath: string) {
|
||||
const key = `${method}:${workflowPath}`
|
||||
if (reportedInactiveCalls.has(key)) return
|
||||
reportedInactiveCalls.add(key)
|
||||
assert(
|
||||
false,
|
||||
`ChangeTracker.${method}() called on inactive tracker for: ${workflowPath}`
|
||||
)
|
||||
|
||||
console.warn(`${method}() called on inactive tracker for: ${workflowPath}`)
|
||||
|
||||
if (isDesktop) {
|
||||
Sentry.captureMessage(
|
||||
`ChangeTracker.${method}() called on inactive tracker`,
|
||||
{
|
||||
level: 'warning',
|
||||
tags: { workflow: workflowPath }
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
export class ChangeTracker {
|
||||
|
||||
@@ -115,7 +115,15 @@ export const defaultGraph: ComfyWorkflowJSON = {
|
||||
{ name: 'CLIP', type: 'CLIP', links: [3, 5], slot_index: 1 },
|
||||
{ name: 'VAE', type: 'VAE', links: [8], slot_index: 2 }
|
||||
],
|
||||
properties: {},
|
||||
properties: {
|
||||
models: [
|
||||
{
|
||||
name: 'v1-5-pruned-emaonly-fp16.safetensors',
|
||||
url: 'https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors',
|
||||
directory: 'checkpoints'
|
||||
}
|
||||
]
|
||||
},
|
||||
widgets_values: ['v1-5-pruned-emaonly-fp16.safetensors']
|
||||
}
|
||||
],
|
||||
|
||||
48
src/scripts/metadata/__fixtures__/helpers.ts
Normal file
48
src/scripts/metadata/__fixtures__/helpers.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { vi } from 'vitest'
|
||||
|
||||
export const EXPECTED_WORKFLOW = {
|
||||
nodes: [{ id: 1, type: 'KSampler', pos: [100, 100], size: [200, 200] }]
|
||||
}
|
||||
|
||||
export const EXPECTED_PROMPT = {
|
||||
'1': { class_type: 'KSampler', inputs: {} }
|
||||
}
|
||||
|
||||
type ReadMethod = 'readAsText' | 'readAsArrayBuffer'
|
||||
|
||||
export function mockFileReaderError(method: ReadMethod): void {
|
||||
vi.spyOn(FileReader.prototype, method).mockImplementation(
|
||||
function (this: FileReader) {
|
||||
queueMicrotask(() =>
|
||||
this.onerror?.(new ProgressEvent('error') as ProgressEvent<FileReader>)
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
export function mockFileReaderAbort(method: ReadMethod): void {
|
||||
vi.spyOn(FileReader.prototype, method).mockImplementation(
|
||||
function (this: FileReader) {
|
||||
queueMicrotask(() =>
|
||||
this.onabort?.(new ProgressEvent('abort') as ProgressEvent<FileReader>)
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
export function mockFileReaderResult(
|
||||
method: ReadMethod,
|
||||
result: string | ArrayBuffer | null
|
||||
): void {
|
||||
vi.spyOn(FileReader.prototype, method).mockImplementation(
|
||||
function (this: FileReader) {
|
||||
Object.defineProperty(this, 'result', {
|
||||
value: result,
|
||||
configurable: true
|
||||
})
|
||||
queueMicrotask(() =>
|
||||
this.onload?.(new ProgressEvent('load') as ProgressEvent<FileReader>)
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
BIN
src/scripts/metadata/__fixtures__/with_metadata.avif
Normal file
BIN
src/scripts/metadata/__fixtures__/with_metadata.avif
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 552 B |
BIN
src/scripts/metadata/__fixtures__/with_metadata.flac
Normal file
BIN
src/scripts/metadata/__fixtures__/with_metadata.flac
Normal file
Binary file not shown.
BIN
src/scripts/metadata/__fixtures__/with_metadata.mp3
Normal file
BIN
src/scripts/metadata/__fixtures__/with_metadata.mp3
Normal file
Binary file not shown.
BIN
src/scripts/metadata/__fixtures__/with_metadata.mp4
Normal file
BIN
src/scripts/metadata/__fixtures__/with_metadata.mp4
Normal file
Binary file not shown.
BIN
src/scripts/metadata/__fixtures__/with_metadata.opus
Normal file
BIN
src/scripts/metadata/__fixtures__/with_metadata.opus
Normal file
Binary file not shown.
BIN
src/scripts/metadata/__fixtures__/with_metadata.webm
Normal file
BIN
src/scripts/metadata/__fixtures__/with_metadata.webm
Normal file
Binary file not shown.
BIN
src/scripts/metadata/__fixtures__/with_metadata.webp
Normal file
BIN
src/scripts/metadata/__fixtures__/with_metadata.webp
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 266 B |
BIN
src/scripts/metadata/__fixtures__/with_metadata_exif_prefix.webp
Normal file
BIN
src/scripts/metadata/__fixtures__/with_metadata_exif_prefix.webp
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 272 B |
@@ -1,7 +1,76 @@
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import {
|
||||
EXPECTED_PROMPT,
|
||||
EXPECTED_WORKFLOW,
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError
|
||||
} from './__fixtures__/helpers'
|
||||
import { getFromAvifFile } from './avif'
|
||||
|
||||
const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.avif')
|
||||
|
||||
afterEach(() => vi.restoreAllMocks())
|
||||
|
||||
describe('AVIF metadata', () => {
|
||||
it('extracts workflow and prompt from EXIF data in ISOBMFF boxes', async () => {
|
||||
const bytes = fs.readFileSync(fixturePath)
|
||||
const file = new File([bytes], 'test.avif', { type: 'image/avif' })
|
||||
|
||||
const result = await getFromAvifFile(file)
|
||||
|
||||
expect(JSON.parse(result.workflow)).toEqual(EXPECTED_WORKFLOW)
|
||||
expect(JSON.parse(result.prompt)).toEqual(EXPECTED_PROMPT)
|
||||
})
|
||||
|
||||
it('returns empty for non-AVIF data', async () => {
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const file = new File([new Uint8Array(16)], 'fake.avif')
|
||||
|
||||
const result = await getFromAvifFile(file)
|
||||
|
||||
expect(result).toEqual({})
|
||||
expect(console.error).toHaveBeenCalledWith('Not a valid AVIF file')
|
||||
})
|
||||
|
||||
it('returns empty when AVIF has valid ftyp but corrupt internal boxes', async () => {
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
|
||||
const buf = new Uint8Array(40)
|
||||
const dv = new DataView(buf.buffer)
|
||||
dv.setUint32(0, 16)
|
||||
buf.set(new TextEncoder().encode('ftypavif'), 4)
|
||||
dv.setUint32(16, 24)
|
||||
buf.set(new TextEncoder().encode('meta'), 20)
|
||||
|
||||
const file = new File([buf], 'corrupt.avif', { type: 'image/avif' })
|
||||
const result = await getFromAvifFile(file)
|
||||
|
||||
expect(result).toEqual({})
|
||||
expect(console.error).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Error parsing AVIF metadata'),
|
||||
expect.anything()
|
||||
)
|
||||
})
|
||||
|
||||
describe('FileReader failure modes', () => {
|
||||
const file = new File([new Uint8Array(16)], 'test.avif')
|
||||
|
||||
it('resolves empty when the FileReader fires error', async () => {
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
mockFileReaderError('readAsArrayBuffer')
|
||||
expect(await getFromAvifFile(file)).toEqual({})
|
||||
})
|
||||
|
||||
it('resolves empty when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsArrayBuffer')
|
||||
expect(await getFromAvifFile(file)).toEqual({})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
const setU32BE = (dv: DataView, off: number, val: number) =>
|
||||
dv.setUint32(off, val, false)
|
||||
const setU16BE = (dv: DataView, off: number, val: number) =>
|
||||
|
||||
@@ -407,6 +407,7 @@ export function getFromAvifFile(file: File): Promise<Record<string, string>> {
|
||||
console.error('FileReader: Error reading AVIF file:', err)
|
||||
resolve({})
|
||||
}
|
||||
reader.onabort = () => resolve({})
|
||||
reader.readAsArrayBuffer(file)
|
||||
})
|
||||
}
|
||||
|
||||
49
src/scripts/metadata/ebml.test.ts
Normal file
49
src/scripts/metadata/ebml.test.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import {
|
||||
EXPECTED_PROMPT,
|
||||
EXPECTED_WORKFLOW,
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError
|
||||
} from './__fixtures__/helpers'
|
||||
import { getFromWebmFile } from './ebml'
|
||||
|
||||
const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.webm')
|
||||
|
||||
describe('WebM/EBML metadata', () => {
|
||||
it('extracts workflow and prompt from EBML SimpleTag elements', async () => {
|
||||
const bytes = fs.readFileSync(fixturePath)
|
||||
const file = new File([bytes], 'test.webm', { type: 'video/webm' })
|
||||
|
||||
const result = await getFromWebmFile(file)
|
||||
|
||||
expect(result.workflow).toEqual(EXPECTED_WORKFLOW)
|
||||
expect(result.prompt).toEqual(EXPECTED_PROMPT)
|
||||
})
|
||||
|
||||
it('returns empty for non-WebM data', async () => {
|
||||
const file = new File([new Uint8Array(16)], 'fake.webm')
|
||||
|
||||
const result = await getFromWebmFile(file)
|
||||
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
describe('FileReader failure modes', () => {
|
||||
afterEach(() => vi.restoreAllMocks())
|
||||
|
||||
const file = new File([new Uint8Array(16)], 'test.webm')
|
||||
|
||||
it('resolves empty when the FileReader fires error', async () => {
|
||||
mockFileReaderError('readAsArrayBuffer')
|
||||
expect(await getFromWebmFile(file)).toEqual({})
|
||||
})
|
||||
|
||||
it('resolves empty when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsArrayBuffer')
|
||||
expect(await getFromWebmFile(file)).toEqual({})
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -353,6 +353,7 @@ export function getFromWebmFile(file: File): Promise<ComfyMetadata> {
|
||||
const reader = new FileReader()
|
||||
reader.onload = (event) => handleFileLoad(event, resolve)
|
||||
reader.onerror = () => resolve({})
|
||||
reader.onabort = () => resolve({})
|
||||
reader.readAsArrayBuffer(file.slice(0, MAX_READ_BYTES))
|
||||
})
|
||||
}
|
||||
|
||||
56
src/scripts/metadata/flac.test.ts
Normal file
56
src/scripts/metadata/flac.test.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import {
|
||||
EXPECTED_PROMPT,
|
||||
EXPECTED_WORKFLOW,
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError
|
||||
} from './__fixtures__/helpers'
|
||||
import { getFromFlacBuffer, getFromFlacFile } from './flac'
|
||||
|
||||
const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.flac')
|
||||
|
||||
afterEach(() => vi.restoreAllMocks())
|
||||
|
||||
describe('FLAC metadata', () => {
|
||||
it('extracts workflow and prompt from Vorbis comments', () => {
|
||||
const bytes = fs.readFileSync(fixturePath)
|
||||
const buffer = bytes.buffer.slice(
|
||||
bytes.byteOffset,
|
||||
bytes.byteOffset + bytes.byteLength
|
||||
)
|
||||
|
||||
const result = getFromFlacBuffer(buffer)
|
||||
|
||||
expect(result.workflow).toBe(JSON.stringify(EXPECTED_WORKFLOW))
|
||||
expect(result.prompt).toBe(JSON.stringify(EXPECTED_PROMPT))
|
||||
})
|
||||
|
||||
it('returns undefined for non-FLAC data', () => {
|
||||
const buf = new ArrayBuffer(16)
|
||||
const result = getFromFlacBuffer(buf)
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
|
||||
describe('FileReader failure modes', () => {
|
||||
const file = new File([new Uint8Array(16)], 'test.flac')
|
||||
|
||||
it('resolves empty when the FileReader fires error', async () => {
|
||||
mockFileReaderError('readAsArrayBuffer')
|
||||
|
||||
const result = await getFromFlacFile(file)
|
||||
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it('resolves empty when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsArrayBuffer')
|
||||
|
||||
const result = await getFromFlacFile(file)
|
||||
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -42,6 +42,8 @@ export function getFromFlacFile(file: File): Promise<Record<string, string>> {
|
||||
const arrayBuffer = event.target.result as ArrayBuffer
|
||||
r(getFromFlacBuffer(arrayBuffer))
|
||||
}
|
||||
reader.onerror = () => r({})
|
||||
reader.onabort = () => r({})
|
||||
reader.readAsArrayBuffer(file)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
import { describe, expect, it } from 'vitest'
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import { ASCII, GltfSizeBytes } from '@/types/metadataTypes'
|
||||
|
||||
import {
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError
|
||||
} from './__fixtures__/helpers'
|
||||
import { getGltfBinaryMetadata } from './gltf'
|
||||
|
||||
describe('GLTF binary metadata parser', () => {
|
||||
@@ -160,4 +164,20 @@ describe('GLTF binary metadata parser', () => {
|
||||
const metadata = await getGltfBinaryMetadata(invalidEmptyFile)
|
||||
expect(metadata).toEqual({})
|
||||
})
|
||||
|
||||
describe('FileReader failure modes', () => {
|
||||
afterEach(() => vi.restoreAllMocks())
|
||||
|
||||
const file = new File([new Uint8Array(16)], 'test.glb')
|
||||
|
||||
it('resolves empty when the FileReader fires error', async () => {
|
||||
mockFileReaderError('readAsArrayBuffer')
|
||||
expect(await getGltfBinaryMetadata(file)).toEqual({})
|
||||
})
|
||||
|
||||
it('resolves empty when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsArrayBuffer')
|
||||
expect(await getGltfBinaryMetadata(file)).toEqual({})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -165,6 +165,7 @@ export function getGltfBinaryMetadata(file: File): Promise<ComfyMetadata> {
|
||||
}
|
||||
}
|
||||
reader.onerror = () => resolve({})
|
||||
reader.onabort = () => resolve({})
|
||||
reader.readAsArrayBuffer(file.slice(0, bytesToRead))
|
||||
})
|
||||
}
|
||||
|
||||
52
src/scripts/metadata/isobmff.test.ts
Normal file
52
src/scripts/metadata/isobmff.test.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import {
|
||||
EXPECTED_PROMPT,
|
||||
EXPECTED_WORKFLOW,
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError
|
||||
} from './__fixtures__/helpers'
|
||||
import { getFromIsobmffFile } from './isobmff'
|
||||
|
||||
const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.mp4')
|
||||
|
||||
describe('ISOBMFF (MP4) metadata', () => {
|
||||
it('extracts workflow and prompt from QuickTime keys/ilst boxes', async () => {
|
||||
const bytes = fs.readFileSync(fixturePath)
|
||||
const file = new File([bytes], 'test.mp4', { type: 'video/mp4' })
|
||||
|
||||
const result = await getFromIsobmffFile(file)
|
||||
|
||||
expect(result.workflow).toEqual(EXPECTED_WORKFLOW)
|
||||
expect(result.prompt).toEqual(EXPECTED_PROMPT)
|
||||
})
|
||||
|
||||
it('returns empty for non-ISOBMFF data', async () => {
|
||||
const file = new File([new Uint8Array(16)], 'fake.mp4', {
|
||||
type: 'video/mp4'
|
||||
})
|
||||
|
||||
const result = await getFromIsobmffFile(file)
|
||||
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
describe('FileReader failure modes', () => {
|
||||
afterEach(() => vi.restoreAllMocks())
|
||||
|
||||
const file = new File([new Uint8Array(16)], 'test.mp4')
|
||||
|
||||
it('resolves empty when the FileReader fires error', async () => {
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
mockFileReaderError('readAsArrayBuffer')
|
||||
expect(await getFromIsobmffFile(file)).toEqual({})
|
||||
})
|
||||
|
||||
it('resolves empty when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsArrayBuffer')
|
||||
expect(await getFromIsobmffFile(file)).toEqual({})
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -274,6 +274,7 @@ export function getFromIsobmffFile(file: File): Promise<ComfyMetadata> {
|
||||
console.error('FileReader: Error reading ISOBMFF file:', err)
|
||||
resolve({})
|
||||
}
|
||||
reader.onabort = () => resolve({})
|
||||
reader.readAsArrayBuffer(file.slice(0, MAX_READ_BYTES))
|
||||
})
|
||||
}
|
||||
|
||||
91
src/scripts/metadata/json.test.ts
Normal file
91
src/scripts/metadata/json.test.ts
Normal file
@@ -0,0 +1,91 @@
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import {
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError,
|
||||
mockFileReaderResult
|
||||
} from './__fixtures__/helpers'
|
||||
import { getDataFromJSON } from './json'
|
||||
|
||||
function jsonFile(content: object): File {
|
||||
return new File([JSON.stringify(content)], 'test.json', {
|
||||
type: 'application/json'
|
||||
})
|
||||
}
|
||||
|
||||
describe('getDataFromJSON', () => {
|
||||
it('detects API-format workflows by class_type on every value', async () => {
|
||||
const apiData = {
|
||||
'1': { class_type: 'KSampler', inputs: {} },
|
||||
'2': { class_type: 'EmptyLatentImage', inputs: {} }
|
||||
}
|
||||
|
||||
const result = await getDataFromJSON(jsonFile(apiData))
|
||||
|
||||
expect(result).toEqual({ prompt: apiData })
|
||||
})
|
||||
|
||||
it('treats objects without universal class_type as a workflow', async () => {
|
||||
const workflow = { nodes: [], links: [], version: 1 }
|
||||
|
||||
const result = await getDataFromJSON(jsonFile(workflow))
|
||||
|
||||
expect(result).toEqual({ workflow })
|
||||
})
|
||||
|
||||
it('extracts templates when the root object has a templates key', async () => {
|
||||
const templates = [{ name: 'basic' }]
|
||||
|
||||
const result = await getDataFromJSON(jsonFile({ templates }))
|
||||
|
||||
expect(result).toEqual({ templates })
|
||||
})
|
||||
|
||||
it('returns undefined for non-JSON content', async () => {
|
||||
const file = new File(['not valid json'], 'bad.json', {
|
||||
type: 'application/json'
|
||||
})
|
||||
|
||||
const result = await getDataFromJSON(file)
|
||||
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
|
||||
describe('FileReader failure modes', () => {
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks()
|
||||
})
|
||||
|
||||
it('resolves undefined when the FileReader fires error', async () => {
|
||||
mockFileReaderError('readAsText')
|
||||
|
||||
const result = await getDataFromJSON(jsonFile({ nodes: [] }))
|
||||
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
|
||||
it('resolves undefined when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsText')
|
||||
|
||||
const result = await getDataFromJSON(jsonFile({ nodes: [] }))
|
||||
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
|
||||
it('resolves undefined when reader.result is not a string', async () => {
|
||||
mockFileReaderResult('readAsText', new ArrayBuffer(8))
|
||||
|
||||
const result = await getDataFromJSON(jsonFile({ nodes: [] }))
|
||||
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
|
||||
it('resolves undefined when reader.result is null', async () => {
|
||||
mockFileReaderResult('readAsText', null)
|
||||
|
||||
const result = await getDataFromJSON(jsonFile({ nodes: [] }))
|
||||
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -6,21 +6,28 @@ export function getDataFromJSON(
|
||||
return new Promise<Record<string, object> | undefined>((resolve) => {
|
||||
const reader = new FileReader()
|
||||
reader.onload = async () => {
|
||||
const readerResult = reader.result as string
|
||||
const jsonContent = JSON.parse(readerResult)
|
||||
if (jsonContent?.templates) {
|
||||
resolve({ templates: jsonContent.templates })
|
||||
return
|
||||
try {
|
||||
if (typeof reader.result !== 'string') {
|
||||
resolve(undefined)
|
||||
return
|
||||
}
|
||||
const jsonContent = JSON.parse(reader.result)
|
||||
if (jsonContent?.templates) {
|
||||
resolve({ templates: jsonContent.templates })
|
||||
return
|
||||
}
|
||||
if (isApiJson(jsonContent)) {
|
||||
resolve({ prompt: jsonContent })
|
||||
return
|
||||
}
|
||||
resolve({ workflow: jsonContent })
|
||||
} catch {
|
||||
resolve(undefined)
|
||||
}
|
||||
if (isApiJson(jsonContent)) {
|
||||
resolve({ prompt: jsonContent })
|
||||
return
|
||||
}
|
||||
resolve({ workflow: jsonContent })
|
||||
return
|
||||
}
|
||||
reader.onerror = () => resolve(undefined)
|
||||
reader.onabort = () => resolve(undefined)
|
||||
reader.readAsText(file)
|
||||
return
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
106
src/scripts/metadata/mp3.test.ts
Normal file
106
src/scripts/metadata/mp3.test.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import {
|
||||
EXPECTED_PROMPT,
|
||||
EXPECTED_WORKFLOW,
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError
|
||||
} from './__fixtures__/helpers'
|
||||
import { getMp3Metadata } from './mp3'
|
||||
|
||||
const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.mp3')
|
||||
|
||||
afterEach(() => vi.restoreAllMocks())
|
||||
|
||||
describe('MP3 metadata', () => {
|
||||
it('extracts workflow and prompt from ID3 tags', async () => {
|
||||
const bytes = fs.readFileSync(fixturePath)
|
||||
const file = new File([bytes], 'test.mp3', { type: 'audio/mpeg' })
|
||||
|
||||
const result = await getMp3Metadata(file)
|
||||
|
||||
expect(result.workflow).toEqual(EXPECTED_WORKFLOW)
|
||||
expect(result.prompt).toEqual(EXPECTED_PROMPT)
|
||||
})
|
||||
|
||||
it('returns undefined fields when file has no embedded metadata', async () => {
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const file = new File([new Uint8Array(16)], 'empty.mp3', {
|
||||
type: 'audio/mpeg'
|
||||
})
|
||||
|
||||
const result = await getMp3Metadata(file)
|
||||
|
||||
expect(result.workflow).toBeUndefined()
|
||||
expect(result.prompt).toBeUndefined()
|
||||
expect(console.error).toHaveBeenCalledWith('Invalid file signature.')
|
||||
})
|
||||
|
||||
it('does not log an invalid signature for a valid MP3 sync header', async () => {
|
||||
const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const buf = new Uint8Array(16)
|
||||
buf[0] = 0xff
|
||||
buf[1] = 0xfb
|
||||
const file = new File([buf], 'valid.mp3', { type: 'audio/mpeg' })
|
||||
|
||||
await getMp3Metadata(file)
|
||||
|
||||
expect(errorSpy).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('does not log an invalid signature for a valid ID3v2 header', async () => {
|
||||
const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const buf = new Uint8Array(16)
|
||||
buf[0] = 0x49
|
||||
buf[1] = 0x44
|
||||
buf[2] = 0x33
|
||||
const file = new File([buf], 'valid-id3.mp3', { type: 'audio/mpeg' })
|
||||
|
||||
await getMp3Metadata(file)
|
||||
|
||||
expect(errorSpy).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('extracts metadata that spans the 4096-byte page boundary', async () => {
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const metadata =
|
||||
`prompt\0${JSON.stringify(EXPECTED_PROMPT)}\0` +
|
||||
`workflow\0${JSON.stringify(EXPECTED_WORKFLOW)}\0`
|
||||
const metadataStart = 4090
|
||||
const size = metadataStart + metadata.length + 4
|
||||
const buf = new Uint8Array(size)
|
||||
for (let i = 0; i < metadata.length; i++) {
|
||||
buf[metadataStart + i] = metadata.charCodeAt(i)
|
||||
}
|
||||
buf[size - 2] = 0xff
|
||||
buf[size - 1] = 0xfb
|
||||
const file = new File([buf], 'large.mp3', { type: 'audio/mpeg' })
|
||||
|
||||
const result = await getMp3Metadata(file)
|
||||
|
||||
expect(result.workflow).toEqual(EXPECTED_WORKFLOW)
|
||||
expect(result.prompt).toEqual(EXPECTED_PROMPT)
|
||||
})
|
||||
|
||||
describe('FileReader failure modes', () => {
|
||||
const file = new File([new Uint8Array(16)], 'test.mp3')
|
||||
|
||||
it('resolves undefined fields when the FileReader fires error', async () => {
|
||||
mockFileReaderError('readAsArrayBuffer')
|
||||
|
||||
const result = await getMp3Metadata(file)
|
||||
|
||||
expect(result).toEqual({ prompt: undefined, workflow: undefined })
|
||||
})
|
||||
|
||||
it('resolves undefined fields when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsArrayBuffer')
|
||||
|
||||
const result = await getMp3Metadata(file)
|
||||
|
||||
expect(result).toEqual({ prompt: undefined, workflow: undefined })
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,21 +1,28 @@
|
||||
export async function getMp3Metadata(file: File) {
|
||||
const reader = new FileReader()
|
||||
const read_process = new Promise(
|
||||
(r) => (reader.onload = (event) => r(event?.target?.result))
|
||||
)
|
||||
const read_process = new Promise<ArrayBuffer | null>((r) => {
|
||||
reader.onload = (event) => r((event?.target?.result as ArrayBuffer) ?? null)
|
||||
reader.onerror = () => r(null)
|
||||
reader.onabort = () => r(null)
|
||||
})
|
||||
reader.readAsArrayBuffer(file)
|
||||
const arrayBuffer = (await read_process) as ArrayBuffer
|
||||
const arrayBuffer = await read_process
|
||||
if (!arrayBuffer) return { prompt: undefined, workflow: undefined }
|
||||
//https://stackoverflow.com/questions/7302439/how-can-i-determine-that-a-particular-file-is-in-fact-an-mp3-file#7302482
|
||||
const sig_bytes = new Uint8Array(arrayBuffer, 0, 3)
|
||||
if (
|
||||
(sig_bytes[0] != 0xff && sig_bytes[1] != 0xfb) ||
|
||||
(sig_bytes[0] != 0x49 && sig_bytes[1] != 0x44 && sig_bytes[2] != 0x33)
|
||||
(sig_bytes[0] != 0xff || sig_bytes[1] != 0xfb) &&
|
||||
(sig_bytes[0] != 0x49 || sig_bytes[1] != 0x44 || sig_bytes[2] != 0x33)
|
||||
)
|
||||
console.error('Invalid file signature.')
|
||||
let header = ''
|
||||
while (header.length < arrayBuffer.byteLength) {
|
||||
const page = String.fromCharCode(
|
||||
...new Uint8Array(arrayBuffer, header.length, header.length + 4096)
|
||||
...new Uint8Array(
|
||||
arrayBuffer,
|
||||
header.length,
|
||||
Math.min(4096, arrayBuffer.byteLength - header.length)
|
||||
)
|
||||
)
|
||||
header += page
|
||||
if (page.match('\u00ff\u00fb')) break
|
||||
|
||||
74
src/scripts/metadata/ogg.test.ts
Normal file
74
src/scripts/metadata/ogg.test.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import {
|
||||
EXPECTED_PROMPT,
|
||||
EXPECTED_WORKFLOW,
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError
|
||||
} from './__fixtures__/helpers'
|
||||
import { getOggMetadata } from './ogg'
|
||||
|
||||
const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.opus')
|
||||
|
||||
afterEach(() => vi.restoreAllMocks())
|
||||
|
||||
describe('OGG/Opus metadata', () => {
|
||||
it('extracts workflow and prompt from an Opus file', async () => {
|
||||
const bytes = fs.readFileSync(fixturePath)
|
||||
const file = new File([bytes], 'test.opus', { type: 'audio/ogg' })
|
||||
|
||||
const result = await getOggMetadata(file)
|
||||
|
||||
expect(result.workflow).toEqual(EXPECTED_WORKFLOW)
|
||||
expect(result.prompt).toEqual(EXPECTED_PROMPT)
|
||||
})
|
||||
|
||||
it('returns undefined fields for non-OGG data', async () => {
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const file = new File([new Uint8Array(16)], 'fake.ogg', {
|
||||
type: 'audio/ogg'
|
||||
})
|
||||
|
||||
const result = await getOggMetadata(file)
|
||||
|
||||
expect(result.workflow).toBeUndefined()
|
||||
expect(result.prompt).toBeUndefined()
|
||||
expect(console.error).toHaveBeenCalledWith('Invalid file signature.')
|
||||
})
|
||||
|
||||
it('handles files larger than 4096 bytes without RangeError', async () => {
|
||||
const size = 5000
|
||||
const buf = new Uint8Array(size)
|
||||
const oggs = new TextEncoder().encode('OggS\0')
|
||||
buf.set(oggs, 0)
|
||||
buf.set(oggs, 4500)
|
||||
const file = new File([buf], 'large.ogg', { type: 'audio/ogg' })
|
||||
|
||||
const result = await getOggMetadata(file)
|
||||
|
||||
expect(result.workflow).toBeUndefined()
|
||||
expect(result.prompt).toBeUndefined()
|
||||
})
|
||||
|
||||
describe('FileReader failure modes', () => {
|
||||
const file = new File([new Uint8Array(16)], 'test.ogg')
|
||||
|
||||
it('resolves undefined fields when the FileReader fires error', async () => {
|
||||
mockFileReaderError('readAsArrayBuffer')
|
||||
|
||||
const result = await getOggMetadata(file)
|
||||
|
||||
expect(result).toEqual({ prompt: undefined, workflow: undefined })
|
||||
})
|
||||
|
||||
it('resolves undefined fields when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsArrayBuffer')
|
||||
|
||||
const result = await getOggMetadata(file)
|
||||
|
||||
expect(result).toEqual({ prompt: undefined, workflow: undefined })
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,17 +1,24 @@
|
||||
export async function getOggMetadata(file: File) {
|
||||
const reader = new FileReader()
|
||||
const read_process = new Promise(
|
||||
(r) => (reader.onload = (event) => r(event?.target?.result))
|
||||
)
|
||||
const read_process = new Promise<ArrayBuffer | null>((r) => {
|
||||
reader.onload = (event) => r((event?.target?.result as ArrayBuffer) ?? null)
|
||||
reader.onerror = () => r(null)
|
||||
reader.onabort = () => r(null)
|
||||
})
|
||||
reader.readAsArrayBuffer(file)
|
||||
const arrayBuffer = (await read_process) as ArrayBuffer
|
||||
const arrayBuffer = await read_process
|
||||
if (!arrayBuffer) return { prompt: undefined, workflow: undefined }
|
||||
const signature = String.fromCharCode(...new Uint8Array(arrayBuffer, 0, 4))
|
||||
if (signature !== 'OggS') console.error('Invalid file signature.')
|
||||
let oggs = 0
|
||||
let header = ''
|
||||
while (header.length < arrayBuffer.byteLength) {
|
||||
const page = String.fromCharCode(
|
||||
...new Uint8Array(arrayBuffer, header.length, header.length + 4096)
|
||||
...new Uint8Array(
|
||||
arrayBuffer,
|
||||
header.length,
|
||||
Math.min(4096, arrayBuffer.byteLength - header.length)
|
||||
)
|
||||
)
|
||||
if (page.match('OggS\u0000')) oggs++
|
||||
header += page
|
||||
|
||||
@@ -1,11 +1,19 @@
|
||||
import { describe, expect, it } from 'vitest'
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import { getFromPngBuffer } from './png'
|
||||
import {
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError
|
||||
} from './__fixtures__/helpers'
|
||||
import { getFromPngBuffer, getFromPngFile } from './png'
|
||||
|
||||
afterEach(() => vi.restoreAllMocks())
|
||||
|
||||
const PNG_SIGNATURE = [0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]
|
||||
|
||||
function createPngWithChunk(
|
||||
chunkType: string,
|
||||
keyword: string,
|
||||
content: string,
|
||||
content: string | Uint8Array,
|
||||
options: {
|
||||
compressionFlag?: number
|
||||
compressionMethod?: number
|
||||
@@ -20,12 +28,11 @@ function createPngWithChunk(
|
||||
translatedKeyword = ''
|
||||
} = options
|
||||
|
||||
const signature = new Uint8Array([
|
||||
0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a
|
||||
])
|
||||
const signature = new Uint8Array(PNG_SIGNATURE)
|
||||
const typeBytes = new TextEncoder().encode(chunkType)
|
||||
const keywordBytes = new TextEncoder().encode(keyword)
|
||||
const contentBytes = new TextEncoder().encode(content)
|
||||
const contentBytes =
|
||||
content instanceof Uint8Array ? content : new TextEncoder().encode(content)
|
||||
|
||||
let chunkData: Uint8Array
|
||||
if (chunkType === 'iTXt') {
|
||||
@@ -66,12 +73,11 @@ function createPngWithChunk(
|
||||
new DataView(lengthBytes.buffer).setUint32(0, chunkData.length, false)
|
||||
|
||||
const crc = new Uint8Array(4)
|
||||
|
||||
const iendType = new TextEncoder().encode('IEND')
|
||||
const iendLength = new Uint8Array(4)
|
||||
const iendCrc = new Uint8Array(4)
|
||||
|
||||
const total = signature.length + 4 + 4 + chunkData.length + 4 + 4 + 4 + 0 + 4
|
||||
const total = signature.length + (4 + 4 + chunkData.length + 4) + (4 + 4 + 4)
|
||||
const result = new Uint8Array(total)
|
||||
|
||||
let offset = 0
|
||||
@@ -138,6 +144,21 @@ describe('getFromPngBuffer', () => {
|
||||
expect(result['workflow']).toBe(workflow)
|
||||
})
|
||||
|
||||
it('logs warning and skips iTXt chunk with unsupported compression method', async () => {
|
||||
vi.spyOn(console, 'warn').mockImplementation(() => {})
|
||||
const buffer = createPngWithChunk('iTXt', 'workflow', 'data', {
|
||||
compressionFlag: 1,
|
||||
compressionMethod: 99
|
||||
})
|
||||
|
||||
const result = await getFromPngBuffer(buffer)
|
||||
|
||||
expect(result['workflow']).toBeUndefined()
|
||||
expect(console.warn).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Unsupported compression method 99')
|
||||
)
|
||||
})
|
||||
|
||||
it('parses compressed iTXt chunk', async () => {
|
||||
const workflow = '{"nodes":[{"id":1,"type":"KSampler"}]}'
|
||||
const contentBytes = new TextEncoder().encode(workflow)
|
||||
@@ -163,83 +184,49 @@ describe('getFromPngBuffer', () => {
|
||||
pos += chunk.length
|
||||
}
|
||||
|
||||
const buffer = createPngWithCompressedITXt(
|
||||
'workflow',
|
||||
compressedBytes,
|
||||
'',
|
||||
''
|
||||
)
|
||||
const buffer = createPngWithChunk('iTXt', 'workflow', compressedBytes, {
|
||||
compressionFlag: 1,
|
||||
compressionMethod: 0
|
||||
})
|
||||
const result = await getFromPngBuffer(buffer)
|
||||
expect(result['workflow']).toBe(workflow)
|
||||
})
|
||||
})
|
||||
|
||||
function createPngWithCompressedITXt(
|
||||
keyword: string,
|
||||
compressedContent: Uint8Array,
|
||||
languageTag: string,
|
||||
translatedKeyword: string
|
||||
): ArrayBuffer {
|
||||
const signature = new Uint8Array([
|
||||
0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a
|
||||
])
|
||||
const typeBytes = new TextEncoder().encode('iTXt')
|
||||
const keywordBytes = new TextEncoder().encode(keyword)
|
||||
const langBytes = new TextEncoder().encode(languageTag)
|
||||
const transBytes = new TextEncoder().encode(translatedKeyword)
|
||||
describe('getFromPngFile', () => {
|
||||
it('reads metadata from a File object', async () => {
|
||||
const workflow = '{"nodes":[]}'
|
||||
const buffer = createPngWithChunk('tEXt', 'workflow', workflow)
|
||||
const file = new File([buffer], 'test.png', { type: 'image/png' })
|
||||
|
||||
const totalLength =
|
||||
keywordBytes.length +
|
||||
1 +
|
||||
2 +
|
||||
langBytes.length +
|
||||
1 +
|
||||
transBytes.length +
|
||||
1 +
|
||||
compressedContent.length
|
||||
const result = await getFromPngFile(file)
|
||||
|
||||
const chunkData = new Uint8Array(totalLength)
|
||||
let pos = 0
|
||||
chunkData.set(keywordBytes, pos)
|
||||
pos += keywordBytes.length
|
||||
chunkData[pos++] = 0
|
||||
chunkData[pos++] = 1
|
||||
chunkData[pos++] = 0
|
||||
chunkData.set(langBytes, pos)
|
||||
pos += langBytes.length
|
||||
chunkData[pos++] = 0
|
||||
chunkData.set(transBytes, pos)
|
||||
pos += transBytes.length
|
||||
chunkData[pos++] = 0
|
||||
chunkData.set(compressedContent, pos)
|
||||
expect(result['workflow']).toBe(workflow)
|
||||
})
|
||||
|
||||
const lengthBytes = new Uint8Array(4)
|
||||
new DataView(lengthBytes.buffer).setUint32(0, chunkData.length, false)
|
||||
it('returns empty for an invalid PNG File', async () => {
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const file = new File([new ArrayBuffer(8)], 'bad.png', {
|
||||
type: 'image/png'
|
||||
})
|
||||
|
||||
const crc = new Uint8Array(4)
|
||||
const iendType = new TextEncoder().encode('IEND')
|
||||
const iendLength = new Uint8Array(4)
|
||||
const iendCrc = new Uint8Array(4)
|
||||
const result = await getFromPngFile(file)
|
||||
|
||||
const total = signature.length + 4 + 4 + chunkData.length + 4 + 4 + 4 + 0 + 4
|
||||
const result = new Uint8Array(total)
|
||||
expect(result).toEqual({})
|
||||
expect(console.error).toHaveBeenCalledWith('Not a valid PNG file')
|
||||
})
|
||||
|
||||
let offset = 0
|
||||
result.set(signature, offset)
|
||||
offset += signature.length
|
||||
result.set(lengthBytes, offset)
|
||||
offset += 4
|
||||
result.set(typeBytes, offset)
|
||||
offset += 4
|
||||
result.set(chunkData, offset)
|
||||
offset += chunkData.length
|
||||
result.set(crc, offset)
|
||||
offset += 4
|
||||
result.set(iendLength, offset)
|
||||
offset += 4
|
||||
result.set(iendType, offset)
|
||||
offset += 4
|
||||
result.set(iendCrc, offset)
|
||||
describe('FileReader failure modes', () => {
|
||||
const file = new File([new Uint8Array(16)], 'test.png')
|
||||
|
||||
return result.buffer
|
||||
}
|
||||
it('rejects when the FileReader fires error', async () => {
|
||||
mockFileReaderError('readAsArrayBuffer')
|
||||
await expect(getFromPngFile(file)).rejects.toBeDefined()
|
||||
})
|
||||
|
||||
it('rejects when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsArrayBuffer')
|
||||
await expect(getFromPngFile(file)).rejects.toThrow('FileReader aborted')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -126,6 +126,7 @@ export async function getFromPngFile(
|
||||
resolve(result)
|
||||
}
|
||||
reader.onerror = () => reject(reader.error)
|
||||
reader.onabort = () => reject(new Error('FileReader aborted'))
|
||||
reader.readAsArrayBuffer(file)
|
||||
})
|
||||
}
|
||||
|
||||
42
src/scripts/metadata/svg.test.ts
Normal file
42
src/scripts/metadata/svg.test.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import { describe, expect, it } from 'vitest'
|
||||
|
||||
import { getSvgMetadata } from './svg'
|
||||
|
||||
function svgFile(content: string): File {
|
||||
return new File([content], 'test.svg', { type: 'image/svg+xml' })
|
||||
}
|
||||
|
||||
describe('getSvgMetadata', () => {
|
||||
it('extracts workflow and prompt from CDATA in <metadata>', async () => {
|
||||
const svg = `<svg xmlns="http://www.w3.org/2000/svg">
|
||||
<metadata><![CDATA[${JSON.stringify({
|
||||
workflow: { nodes: [] },
|
||||
prompt: { '1': {} }
|
||||
})}]]></metadata>
|
||||
<rect width="1" height="1"/>
|
||||
</svg>`
|
||||
|
||||
const result = await getSvgMetadata(svgFile(svg))
|
||||
|
||||
expect(result).toEqual({
|
||||
workflow: { nodes: [] },
|
||||
prompt: { '1': {} }
|
||||
})
|
||||
})
|
||||
|
||||
it('returns empty when SVG has no metadata element', async () => {
|
||||
const svg = '<svg xmlns="http://www.w3.org/2000/svg"><rect/></svg>'
|
||||
|
||||
const result = await getSvgMetadata(svgFile(svg))
|
||||
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it('returns empty when CDATA contains invalid JSON', async () => {
|
||||
const svg = `<svg><metadata><![CDATA[not valid json]]></metadata></svg>`
|
||||
|
||||
const result = await getSvgMetadata(svgFile(svg))
|
||||
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
})
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user