Compare commits

...

128 Commits

Author SHA1 Message Date
Terry Jia
cb3c415065 [3d] flash preview screen board if reach out limitation 2025-02-19 15:30:38 -05:00
Terry Jia
d3ab23a532 [3d] flash preview screen board if reach out limitation 2025-02-19 15:27:22 -05:00
filtered
08a6867c00 [Desktop] Offer Troubleshoot page instead of Reinstall on start error (#2623)
Co-authored-by: github-actions <github-actions@github.com>
2025-02-19 10:30:23 -05:00
filtered
dbbe67dfcd [Desktop] Fix missing git logo in troubleshooting (#2633) 2025-02-19 10:29:48 -05:00
bymyself
40fa1d37bc Fix pasting image that was copied from browser (#2630) 2025-02-19 10:27:58 -05:00
filtered
0d6bc669f5 [Desktop] Fix invalid type assertion in API (#2631) 2025-02-19 21:59:17 +11:00
Chenlei Hu
e4444d4074 1.10.6 (#2628) 2025-02-18 20:33:58 -05:00
Chenlei Hu
cbf5dff633 Update litegraph 0.8.87 (#2625)
Co-authored-by: github-actions <github-actions@github.com>
2025-02-18 20:25:17 -05:00
Chenlei Hu
9de8450deb Update test expectations (#2627)
Co-authored-by: github-actions <github-actions@github.com>
2025-02-18 20:25:06 -05:00
Chenlei Hu
3b0e3d635b [BugFix] Fix node color for custom light themes (#2621)
Co-authored-by: github-actions <github-actions@github.com>
2025-02-18 19:08:34 -05:00
Chenlei Hu
d1a682bc01 [Refactor] Extract color selector as component (#2620) 2025-02-18 15:28:17 -05:00
Terry Jia
01ffc9e4eb [3d] allow using mouse wheel to adjust preview screen size (#2619) 2025-02-18 14:59:43 -05:00
Chenlei Hu
54e42178f7 1.10.5 (#2617) 2025-02-18 12:26:27 -05:00
Chenlei Hu
25e5ab3a36 Add bypass action to selection toolbox (#2616) 2025-02-18 12:25:49 -05:00
Chenlei Hu
28dd6a2702 Update litegraph 0.8.85 (#2615) 2025-02-18 11:51:36 -05:00
bymyself
3b3df250cd Add refresh button to selecton toolbox (#2612) 2025-02-18 11:39:43 -05:00
bymyself
6441a86619 [Style] Update toolbox style (#2614) 2025-02-18 11:34:06 -05:00
Chenlei Hu
79db202925 [New Feature] Selection Toolbox (#2608)
Co-authored-by: github-actions <github-actions@github.com>
2025-02-17 19:07:49 -05:00
Chenlei Hu
f7556e0015 Add DeleteSelectedItems command (#2606) 2025-02-17 17:16:12 -05:00
bymyself
141e64354c Support batch image upload (#2597) 2025-02-17 13:56:21 -05:00
bymyself
79452ce267 Fix extraneous values in template workflows (#2605) 2025-02-17 13:55:29 -05:00
Chenlei Hu
4d8a5eacba 1.10.4 (#2604) 2025-02-17 10:13:43 -05:00
bymyself
8f5a9a50aa Remove duplicate outpaint template (#2602)
Co-authored-by: github-actions <github-actions@github.com>
Co-authored-by: Chenlei Hu <huchenlei@proton.me>
2025-02-17 10:12:09 -05:00
Margen67
7bc48c5074 Formatting/cleanup (#2594) 2025-02-17 10:10:00 -05:00
Dr.Lt.Data
e04ea07774 refine locales/ko (#2600) 2025-02-17 10:09:09 -05:00
bymyself
75af956279 Fix gallery navigator icons (#2601) 2025-02-17 10:08:46 -05:00
bymyself
434a2307a2 Remove lora dependency from flux canny template (#2603) 2025-02-17 10:08:16 -05:00
filtered
336b3caf9a [Desktop] Update uv cache clear task to show terminal (#2598) 2025-02-17 23:33:34 +11:00
filtered
c757fbaeb4 [Test] Fix unnecessary circular reference (#2596) 2025-02-17 20:18:26 +11:00
Chenlei Hu
fd27b3d580 Fix title editor font size (#2593) 2025-02-16 21:48:02 -05:00
Chenlei Hu
0658698a13 Selection Overlay (#2592) 2025-02-16 21:23:07 -05:00
Terry Jia
b2375a150c [3d] fully convert load 3d nodes into vue (#2590) 2025-02-16 20:15:49 -05:00
Chenlei Hu
9ebb5b2a0c 1.10.3 (#2591) 2025-02-16 20:14:50 -05:00
Chenlei Hu
d6a5deccd8 [Refactor] useAbsolutePosition composable (#2589) 2025-02-16 15:39:58 -05:00
Chenlei Hu
3f4d11c63a Inplace widget to input conversion (#2588)
Co-authored-by: github-actions <github-actions@github.com>
2025-02-16 13:41:32 -05:00
Margen67
44498739fc Update setup-node to v4 (#2587) 2025-02-16 13:14:01 -05:00
dependabot[bot]
764ec9f7d0 Bump vite from 5.4.6 to 5.4.14 (#2585)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-02-16 11:23:17 -05:00
bymyself
e3234aa0aa Normalize translation keys in template card component (#2574)
Co-authored-by: github-actions <github-actions@github.com>
Co-authored-by: huchenlei <huchenlei@proton.me>
2025-02-16 10:17:49 -05:00
bymyself
df11c99393 Refactor node image upload and preview (#2580)
Co-authored-by: huchenlei <huchenlei@proton.me>
2025-02-16 10:09:02 -05:00
Chenlei Hu
317ea8b932 1.10.2 (#2583) 2025-02-16 09:49:25 -05:00
bymyself
108884a304 Replace "clip" with "text_encoders" in template workflows (#2572) 2025-02-16 09:39:10 -05:00
bymyself
9f1992ca59 Change title of pose ControlNet template workflow (#2573) 2025-02-16 09:38:18 -05:00
bymyself
39f245fd97 Remove interchangeable models from template workflows (#2575) 2025-02-16 09:36:34 -05:00
bymyself
2d2fa5bfe9 Fix incorrect link in template workflow (#2579) 2025-02-16 09:35:54 -05:00
Terry Jia
bfb1b80cd7 [3d] bug fix for unable click vue button (#2581) 2025-02-16 09:35:20 -05:00
bymyself
0c8bfb4650 Add stable zero model url to whitelist (#2577) 2025-02-16 08:42:50 -05:00
bymyself
f69180cd84 Move node field to tags in error report (#2570)
Co-authored-by: github-actions <github-actions@github.com>
2025-02-15 21:50:54 -05:00
Chenlei Hu
2ac177caeb 1.10.1 (#2569) 2025-02-15 19:34:13 -05:00
bymyself
77d3e0c45e Add ComfyUI Examples workflows to in-app templates (#2541)
Co-authored-by: jojodecayz <121620462+jojodecayz@users.noreply.github.com>
Co-authored-by: github-actions <github-actions@github.com>
2025-02-15 16:15:56 -05:00
Chenlei Hu
00dceb880a Update litegraph 0.8.81 (#2568) 2025-02-15 15:06:21 -05:00
Chenlei Hu
acea173ba0 [BugFix] Set height to 0 for hidden widgets on GroupNode (#2566) 2025-02-15 12:24:38 -05:00
Chenlei Hu
bcedd5f4ed [Style] Show right arrow on nested submenu item (#2564) 2025-02-15 11:18:46 -05:00
Chenlei Hu
168ea05f81 [Style] Highlight active workflow item in the workflow sidebar (#2563) 2025-02-15 11:01:47 -05:00
Chenlei Hu
370ad7a4f9 1.10.0 (#2562) 2025-02-15 09:59:56 -05:00
Hiroaki Ogasawara
b9cfa70dcd chore: Add type augmentations for litegraph and enable copying of DTS (#2560) 2025-02-15 09:57:27 -05:00
Chenlei Hu
c15201bfe2 [Cleanup] Remove unused imports in domWidget (#2559) 2025-02-14 22:09:27 -05:00
Chenlei Hu
0e2ce5e1ca Upstream widgets layout to litegraph (#2557) 2025-02-14 19:40:31 -05:00
Chenlei Hu
5dc4a1b9cd Update litegraph 0.8.79 (#2556)
Co-authored-by: github-actions <github-actions@github.com>
2025-02-14 14:42:53 -05:00
Chenlei Hu
acfb95f8d4 Use LGraphNode.setSize (#2555) 2025-02-14 11:02:56 -05:00
Chenlei Hu
f2065777b5 1.9.17 (#2553) 2025-02-14 09:58:19 -05:00
Yuki Shindo
b8b1e58172 [keybinding-panel] Include label in filter fields for multilingual search (#2549) 2025-02-14 09:48:44 -05:00
bymyself
2e86393378 Add null checks to widget constructor options (#2552) 2025-02-14 09:48:07 -05:00
bymyself
530ca75dd0 Fix remote widget undefined arg (#2551) 2025-02-14 09:47:18 -05:00
Chenlei Hu
f9c2db5908 Type LGraphNode.addDOMWidget (#2548) 2025-02-13 15:15:21 -05:00
Chenlei Hu
166ad432f3 [Refactor] Generalize dom widget layout as Widget.computeLayoutSize hook (#2547) 2025-02-13 14:30:46 -05:00
Chenlei Hu
174754e646 [Refactor] Extract widget layout logic (#2545) 2025-02-13 11:43:40 -05:00
Chenlei Hu
43dd457bf5 1.9.16 (#2546) 2025-02-13 11:03:16 -05:00
Yuki Shindo
625aa9bd11 Improve Japanese Text for Enhanced Readability (#2542) 2025-02-12 20:58:05 -05:00
Chenlei Hu
f791322ddb Revert "[Cleanup] Remove unused hooks on DOMWidget" (#2543) 2025-02-12 20:57:38 -05:00
Chenlei Hu
89812ce7d0 [Cleanup] Remove unused hooks on DOMWidget (#2540) 2025-02-12 19:41:06 -05:00
Chenlei Hu
c7aaa2a45d Type Widget.computedHeight (#2539) 2025-02-12 16:50:30 -05:00
Chenlei Hu
8bb785c5e4 Show convert to widget option slot context menu (#2538) 2025-02-12 16:07:19 -05:00
Chenlei Hu
a861a070d0 Remove horizontal layout of reroute node (#2532) 2025-02-12 14:43:34 -05:00
Chenlei Hu
108e37deca 1.9.15 (#2535) 2025-02-12 14:19:15 -05:00
Chenlei Hu
9082903956 [Style] Fix pointer events on graph canvas panel content (#2534) 2025-02-12 14:11:54 -05:00
Chenlei Hu
2cb9d4dd1c 1.9.14 (#2531) 2025-02-12 11:16:05 -05:00
bymyself
46f0733ae7 Ignore reserved keybindings when typing in text input (#2514)
Co-authored-by: filtered <176114999+webfiltered@users.noreply.github.com>
2025-02-12 11:15:19 -05:00
Chenlei Hu
150b4341b2 Update litegraph 0.8.77 (#2530) 2025-02-12 11:11:05 -05:00
bymyself
054f8f6838 Move install directory selection tooltip to not block buttons (#2527) 2025-02-12 10:02:56 -05:00
bymyself
9fd73873b6 [Desktop] Re-run path validation when re-focusing installation location input (#2528) 2025-02-12 10:02:42 -05:00
bymyself
bfec9b692b Add keybind hint to confirm close dialog (#2529)
Co-authored-by: github-actions <github-actions@github.com>
2025-02-12 10:02:27 -05:00
Terry Jia
29cd693335 [3d] add tooltip (#2524)
Co-authored-by: github-actions <github-actions@github.com>
2025-02-11 22:28:24 -05:00
Chenlei Hu
4f6891a5ad [Cleanup] Remove legacy settings dialog CSS (#2525) 2025-02-11 21:55:21 -05:00
Chenlei Hu
ca2aee296a [Cleanup] Remove unnecessary pointer-event-auto (#2523) 2025-02-11 21:41:47 -05:00
Chenlei Hu
9017513979 [Refactor] Manage comfyui-body elements with Vue (#2522)
Co-authored-by: github-actions <github-actions@github.com>
2025-02-11 18:37:08 -05:00
Terry Jia
8cfe814daa [3d] refactor load3d (#2521) 2025-02-11 17:43:36 -05:00
Chenlei Hu
c901d5f659 1.9.13 (#2520) 2025-02-11 13:40:17 -05:00
Chenlei Hu
1263fbb4ad Enable ts-strict for scripts/widgets.ts (#2519) 2025-02-11 12:38:24 -05:00
Chenlei Hu
8db101c1cb [Type] Mark app as required arg for ComfyWidgetConstructor (#2518) 2025-02-11 11:39:23 -05:00
Chenlei Hu
efe7843469 [Refactor] useComboWidget composable (#2517) 2025-02-11 11:17:09 -05:00
Chenlei Hu
cfa46ebacb [Refactor] useBooleanWidget composable (#2516) 2025-02-11 11:05:58 -05:00
Chenlei Hu
ab305059bc [Refactor] useImageUploadWidget composable (#2515) 2025-02-11 10:50:55 -05:00
bymyself
cd8c0d2865 Add refresh button to remote (lazy) widgets (#2494) 2025-02-11 10:31:32 -05:00
bymyself
6a9d309818 Increase width of actionbar drag handle (#2511) 2025-02-11 10:29:26 -05:00
bymyself
e3f226e483 Fix actionbar has drag preview (#2512) 2025-02-11 10:28:56 -05:00
bymyself
8822edaf24 Improve settings dialog accessibility (#2513) 2025-02-11 10:28:41 -05:00
Chenlei Hu
44b9a477b1 [Refactor] useMarkdownWidget composable (#2510) 2025-02-11 00:12:29 -05:00
bymyself
e4f8d4b8d0 Add support for image_folder field in node image input spec (#2509) 2025-02-11 00:12:03 -05:00
Chenlei Hu
a93f57eeb2 [Refactor] useSeedWidget composable (#2508) 2025-02-10 23:55:59 -05:00
Chenlei Hu
0c2879b6f4 [Refactor] useIntWidget composable (#2507) 2025-02-10 23:07:57 -05:00
Chenlei Hu
d8d46f8cf6 1.9.12 (#2506) 2025-02-10 22:17:12 -05:00
Chenlei Hu
1a06c91ed1 [BugFix] Workaround custom nodes expectation on DOMWidget.value (#2505) 2025-02-10 22:06:20 -05:00
Chenlei Hu
d4122a7510 [Refactor] useFloatWidget composable (#2504) 2025-02-10 22:06:11 -05:00
Chenlei Hu
b4c59ffae1 [Refactor] useStringWidget composable (#2503) 2025-02-10 19:49:00 -05:00
Chenlei Hu
46428cbf7d [Cleanup] Rename remoteWidgetHook (#2500) 2025-02-10 18:12:15 -05:00
filtered
2d759aa9e3 Fix element style override CSS in #2499 (#2501) 2025-02-10 18:11:32 -05:00
Chenlei Hu
08e613e468 Enable ts-strict for scripts/domWidgets (#2499) 2025-02-10 17:20:30 -05:00
Chenlei Hu
8052b2a02a Type LGraphNode.getInnerNodes (#2498) 2025-02-10 16:35:26 -05:00
Chenlei Hu
0479b112c1 1.9.11 (#2497) 2025-02-10 15:31:17 -05:00
Chenlei Hu
d7a0ee8703 Update litegraph 0.8.76 (#2496)
Co-authored-by: github-actions <github-actions@github.com>
2025-02-10 15:30:46 -05:00
Chenlei Hu
9051ab8d7a Type domWidget computeSize (#2495) 2025-02-10 12:58:40 -05:00
Terry Jia
aaca5191ab Light fov UI change (#2492) 2025-02-10 11:23:33 -05:00
Chenlei Hu
9707a30d0e 1.9.10 (#2491) 2025-02-09 23:39:57 -05:00
Chenlei Hu
e100041db4 Update litegraph 0.8.75 (#2490) 2025-02-09 23:39:27 -05:00
Chenlei Hu
21718d9da2 1.9.9 (#2486) 2025-02-09 20:50:47 -05:00
Chenlei Hu
2b4c594b21 Update litegraph 0.8.74 (#2485) 2025-02-09 20:26:58 -05:00
Chenlei Hu
00abd885c9 [Refactor] ES6 DOMWidget class (#2482) 2025-02-09 20:20:51 -05:00
bymyself
550a9d04c5 Fix color and slider settings types to inherit attrs (#2483) 2025-02-09 20:18:09 -05:00
bymyself
eeb1c34ada Update public API syntax of remote (lazy) widgets (#2477)
Co-authored-by: huchenlei <huchenlei@proton.me>
2025-02-09 12:41:14 -05:00
Terry Jia
83cc49a42b [3d] use vue to rewrite the UI for load3d (#2467) 2025-02-09 12:05:42 -05:00
Chenlei Hu
91a3d1228e Reference specific ComfyUI_devtools in CI (#2479) 2025-02-09 12:00:26 -05:00
Chenlei Hu
3d59d478b6 [Desktop] Fix torch mirror setting (#2475) 2025-02-08 21:08:23 -05:00
Chenlei Hu
4dd292252e 1.9.8 (#2474) 2025-02-08 18:31:16 -05:00
Chenlei Hu
0d307ff587 Update litegraph 0.8.73 (#2472) 2025-02-08 17:58:39 -05:00
Chenlei Hu
88a969df07 Update litegraph 0.8.72 (#2470) 2025-02-08 17:51:37 -05:00
Dr.Lt.Data
9e37738dc8 refine locales/ko (#2469) 2025-02-08 17:31:26 -05:00
Chenlei Hu
9b97abad57 1.9.7 (#2466) 2025-02-08 12:11:24 -05:00
Chenlei Hu
67fcb4fed4 Update litegraph 0.8.71 (#2465) 2025-02-08 12:10:44 -05:00
229 changed files with 25838 additions and 3622 deletions

View File

@@ -2,10 +2,7 @@ name: ESLint
on:
pull_request:
branches:
- main
- master
- 'dev*'
branches: [ main, master, dev* ]
jobs:
eslint:
@@ -13,8 +10,8 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Use Node.js
uses: actions/setup-node@v3
uses: actions/setup-node@v4
with:
node-version: lts/*
node-version: 'lts/*'
- run: npm ci
- run: npm run lint
- run: npm run lint

View File

@@ -12,12 +12,12 @@ jobs:
- uses: actions/checkout@v4
- name: Use Node.js
uses: actions/setup-node@v3
uses: actions/setup-node@v4
with:
node-version: lts/*
node-version: 'lts/*'
- name: Install dependencies
run: npm ci
- name: Run Prettier check
run: npm run format:check
run: npm run format:check

View File

@@ -1,4 +1,5 @@
name: Update Locales for given custom node repository
on:
workflow_dispatch:
inputs:
@@ -23,27 +24,27 @@ jobs:
- name: Checkout ComfyUI
uses: actions/checkout@v4
with:
repository: 'comfyanonymous/ComfyUI'
path: 'ComfyUI'
repository: comfyanonymous/ComfyUI
path: ComfyUI
ref: master
- name: Checkout ComfyUI_frontend
uses: actions/checkout@v4
with:
repository: 'Comfy-Org/ComfyUI_frontend'
path: 'ComfyUI_frontend'
repository: Comfy-Org/ComfyUI_frontend
path: ComfyUI_frontend
- name: Checkout ComfyUI_devtools
uses: actions/checkout@v4
with:
repository: 'Comfy-Org/ComfyUI_devtools'
path: 'ComfyUI/custom_nodes/ComfyUI_devtools'
repository: Comfy-Org/ComfyUI_devtools
path: ComfyUI/custom_nodes/ComfyUI_devtools
- name: Checkout custom node repository
uses: actions/checkout@v4
with:
repository: ${{ inputs.owner }}/${{ inputs.repository }}
path: 'ComfyUI/custom_nodes/${{ inputs.repository }}'
- uses: actions/setup-node@v3
- uses: actions/setup-node@v4
with:
node-version: lts/*
node-version: 'lts/*'
- uses: actions/setup-python@v4
with:
python-version: '3.10'
@@ -53,14 +54,12 @@ jobs:
pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu
pip install -r requirements.txt
pip install wait-for-it
shell: bash
working-directory: ComfyUI
- name: Install custom node requirements
run: |
if [ -f "requirements.txt" ]; then
pip install -r requirements.txt
fi
shell: bash
working-directory: ComfyUI/custom_nodes/${{ inputs.repository }}
- name: Build & Install ComfyUI_frontend
run: |
@@ -68,14 +67,12 @@ jobs:
npm run build
rm -rf ../ComfyUI/web/*
mv dist/* ../ComfyUI/web/
shell: bash
working-directory: ComfyUI_frontend
- name: Start ComfyUI server
run: |
python main.py --cpu --multi-user &
wait-for-it --service 127.0.0.1:8188 -t 600
working-directory: ComfyUI
shell: bash
- name: Install Playwright Browsers
run: npx playwright install chromium --with-deps
working-directory: ComfyUI_frontend
@@ -153,7 +150,7 @@ jobs:
echo "Pushing changes to ${{ inputs.fork_owner }}/${{ inputs.repository }}"
git push -f git@github.com:${{ inputs.fork_owner }}/${{ inputs.repository }}.git update-locales
- name: Create PR
- name: Create PR
working-directory: ComfyUI/custom_nodes/${{ inputs.repository }}
run: |
# Create PR using gh cli

View File

@@ -13,7 +13,7 @@ jobs:
update-locales:
runs-on: ubuntu-latest
steps:
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.1
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.2
- name: Install Playwright Browsers
run: npx playwright install chromium --with-deps
working-directory: ComfyUI_frontend

View File

@@ -1,4 +1,5 @@
name: Update Locales
on:
pull_request:
branches: [ main, master, dev* ]
@@ -9,7 +10,7 @@ jobs:
if: github.event.pull_request.head.repo.full_name == github.repository
runs-on: ubuntu-latest
steps:
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.1
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.2
- name: Install Playwright Browsers
run: npx playwright install chromium --with-deps
working-directory: ComfyUI_frontend

View File

@@ -2,12 +2,10 @@ name: Create Release Draft
on:
pull_request:
types: [closed]
branches:
- main
- master
types: [ closed ]
branches: [ main, master ]
paths:
- "package.json"
- 'package.json'
jobs:
draft_release:
@@ -18,9 +16,9 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
- uses: actions/setup-node@v3
- uses: actions/setup-node@v4
with:
node-version: lts/*
node-version: 'lts/*'
- name: Get current version
id: current_version
run: echo ::set-output name=version::$(node -p "require('./package.json').version")
@@ -42,7 +40,7 @@ jobs:
tag_name: v${{ steps.current_version.outputs.version }}
draft: false
prerelease: false
make_latest: "true"
make_latest: true
generate_release_notes: true
publish_types:
runs-on: ubuntu-latest
@@ -51,14 +49,14 @@ jobs:
contains(github.event.pull_request.labels.*.name, 'Release')
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v3
- uses: actions/setup-node@v4
with:
node-version: lts/*
registry-url: "https://registry.npmjs.org"
node-version: 'lts/*'
registry-url: https://registry.npmjs.org
- run: npm ci
- run: npm run build:types
- name: Publish package
run: npm publish --access public
working-directory: ./dist
working-directory: dist
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

View File

@@ -1,5 +1,4 @@
# Setting test expectation screenshots for Playwright
name: Update Playwright Expectations
on:
@@ -11,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
if: github.event.label.name == 'New Browser Test Expectations'
steps:
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.1
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.2
- name: Install Playwright Browsers
run: npx playwright install chromium --with-deps
working-directory: ComfyUI_frontend

View File

@@ -2,20 +2,17 @@ name: Tests CI
on:
push:
branches:
- main
- master
branches: [ main, master ]
pull_request:
branches:
- main
- master
- 'dev*'
branches: [ main, master, dev* ]
jobs:
jest-tests:
runs-on: ubuntu-latest
steps:
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.1
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.2
with:
devtools_ref: 7b81139e904519db8e5481899ef36bbb4393cb6b
- name: Run Jest tests
run: |
npm run test:generate
@@ -25,7 +22,9 @@ jobs:
playwright-tests-chromium:
runs-on: ubuntu-latest
steps:
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.1
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.2
with:
devtools_ref: 7b81139e904519db8e5481899ef36bbb4393cb6b
- name: Install Playwright Browsers
run: npx playwright install chromium --with-deps
working-directory: ComfyUI_frontend
@@ -42,7 +41,9 @@ jobs:
playwright-tests-chromium-2x:
runs-on: ubuntu-latest
steps:
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.1
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.2
with:
devtools_ref: 7b81139e904519db8e5481899ef36bbb4393cb6b
- name: Install Playwright Browsers
run: npx playwright install chromium --with-deps
working-directory: ComfyUI_frontend
@@ -59,7 +60,9 @@ jobs:
playwright-tests-mobile-chrome:
runs-on: ubuntu-latest
steps:
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.1
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.2
with:
devtools_ref: 7b81139e904519db8e5481899ef36bbb4393cb6b
- name: Install Playwright Browsers
run: npx playwright install chromium --with-deps
working-directory: ComfyUI_frontend

View File

@@ -2,15 +2,9 @@ name: Vitest Tests
on:
push:
branches:
- main
- master
- 'dev*'
branches: [ main, master, dev* ]
pull_request:
branches:
- main
- master
- 'dev*'
branches: [ main, master, dev* ]
jobs:
test:
@@ -20,9 +14,9 @@ jobs:
- uses: actions/checkout@v4
- name: Use Node.js
uses: actions/setup-node@v3
uses: actions/setup-node@v4
with:
node-version: lts/*
node-version: 'lts/*'
- name: Install dependencies
run: npm ci

View File

@@ -18,7 +18,7 @@
{
"name": "fake_model.safetensors",
"url": "http://localhost:8188/api/devtools/fake_model.safetensors",
"directory": "clip"
"directory": "text_encoders"
}
],
"version": 0.4

View File

@@ -0,0 +1,63 @@
{
"last_node_id": 1,
"last_link_id": 0,
"nodes": [
{
"id": 1,
"type": "UNKNOWN NODE",
"pos": [
48,
86
],
"size": {
"0": 358.80780029296875,
"1": 314.7989501953125
},
"flags": {},
"order": 0,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": null,
"slot_index": 0
},
{
"name": "foo",
"type": "STRING",
"link": null,
"slot_index": 1,
"widget": {
"name": "foo"
}
}
],
"outputs": [
{
"name": "STRING",
"type": "STRING",
"links": [],
"slot_index": 0,
"shape": 6
}
],
"properties": {
"Node name for S&R": "UNKNOWN NODE"
},
"widgets_values": [
"wd-v1-4-moat-tagger-v2",
0.35,
0.85,
false,
false,
""
]
}
],
"links": [],
"groups": [],
"config": {},
"extra": {},
"version": 0.4
}

View File

@@ -0,0 +1,43 @@
{
"last_node_id": 10,
"last_link_id": 9,
"nodes": [
{
"id": 10,
"type": "DevToolsNodeWithSeedInput",
"pos": [
20,
50
],
"size": [
315,
82
],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"Node name for S&R": "DevToolsNodeWithSeedInput"
},
"widgets_values": [
0,
"randomize"
]
}
],
"links": [],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1,
"offset": [
0,
0
]
}
},
"version": 0.4
}

View File

@@ -152,9 +152,10 @@ test.describe('Color Palette', () => {
// doesn't update the store immediately.
await comfyPage.setup()
await comfyPage.loadWorkflow('every_node_color')
await comfyPage.setSetting('Comfy.ColorPalette', 'obsidian_dark')
await expect(comfyPage.canvas).toHaveScreenshot(
'custom-color-palette-obsidian-dark.png'
'custom-color-palette-obsidian-dark-all-colors.png'
)
await comfyPage.setSetting('Comfy.ColorPalette', 'light_red')
await comfyPage.nextFrame()
@@ -232,7 +233,7 @@ test.describe('Node Color Adjustments', () => {
const workflow = await comfyPage.page.evaluate(() => {
return localStorage.getItem('workflow')
})
for (const node of JSON.parse(workflow).nodes) {
for (const node of JSON.parse(workflow ?? '{}').nodes) {
if (node.bgcolor) expect(node.bgcolor).not.toMatch(/hsla/)
if (node.color) expect(node.color).not.toMatch(/hsla/)
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 106 KiB

After

Width:  |  Height:  |  Size: 145 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 102 KiB

After

Width:  |  Height:  |  Size: 139 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 141 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 133 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 100 KiB

After

Width:  |  Height:  |  Size: 141 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 98 KiB

After

Width:  |  Height:  |  Size: 135 KiB

View File

@@ -85,8 +85,8 @@ test.describe('Missing models warning', () => {
status: 200,
body: JSON.stringify([
{
name: 'clip',
folders: ['ComfyUI/models/clip']
name: 'text_encoders',
folders: ['ComfyUI/models/text_encoders']
}
])
}
@@ -109,7 +109,7 @@ test.describe('Missing models warning', () => {
])
}
comfyPage.page.route(
'**/api/experiment/models/clip',
'**/api/experiment/models/text_encoders',
(route) => route.fulfill(clipModelsRes),
{ times: 1 }
)

View File

@@ -1,5 +1,6 @@
import { expect } from '@playwright/test'
import { SettingParams } from '../src/types/settingTypes'
import { comfyPageFixture as test } from './fixtures/ComfyPage'
test.describe('Topbar commands', () => {
@@ -134,6 +135,90 @@ test.describe('Topbar commands', () => {
expect(await comfyPage.getSetting('Comfy.TestSetting')).toBe(true)
expect(await comfyPage.page.evaluate(() => window['changeCount'])).toBe(2)
})
test.describe('Passing through attrs to setting components', () => {
const testCases: Array<{
config: Partial<SettingParams>
selector: string
}> = [
{
config: {
type: 'boolean',
defaultValue: true
},
selector: '.p-toggleswitch.p-component'
},
{
config: {
type: 'number',
defaultValue: 10
},
selector: '.p-inputnumber input'
},
{
config: {
type: 'slider',
defaultValue: 10
},
selector: '.p-slider.p-component'
},
{
config: {
type: 'combo',
defaultValue: 'foo',
options: ['foo', 'bar', 'baz']
},
selector: '.p-select.p-component'
},
{
config: {
type: 'text',
defaultValue: 'Hello'
},
selector: '.p-inputtext'
},
{
config: {
type: 'color',
defaultValue: '#000000'
},
selector: '.p-colorpicker-preview'
}
] as const
for (const { config, selector } of testCases) {
test(`${config.type} component should respect disabled attr`, async ({
comfyPage
}) => {
await comfyPage.page.evaluate((config) => {
window['app'].registerExtension({
name: 'TestExtension1',
settings: [
{
id: 'Comfy.TestSetting',
name: 'Test',
// The `disabled` attr is common to all settings components
attrs: { disabled: true },
...config
}
]
})
}, config)
await comfyPage.settingDialog.open()
const component = comfyPage.settingDialog.root
.getByText('TestSetting Test')
.locator(selector)
const isDisabled = await component.evaluate((el) =>
el.tagName === 'INPUT'
? (el as HTMLInputElement).disabled
: el.classList.contains('p-disabled')
)
expect(isDisabled).toBe(true)
})
}
})
})
test.describe('About panel', () => {

View File

@@ -1,3 +1,4 @@
import type { LGraphNode } from '@comfyorg/litegraph'
import type { APIRequestContext, Locator, Page } from '@playwright/test'
import { expect } from '@playwright/test'
import { test as base } from '@playwright/test'
@@ -278,8 +279,8 @@ export class ComfyPage {
await this.page.addStyleTag({
content: `
* {
font-family: 'Roboto Mono', 'Noto Color Emoji';
}`
font-family: 'Roboto Mono', 'Noto Color Emoji';
}`
})
await this.page.waitForFunction(() => document.fonts.ready)
await this.page.waitForFunction(
@@ -646,6 +647,18 @@ export class ComfyPage {
await this.nextFrame()
}
async selectNodes(nodeTitles: string[]) {
await this.page.keyboard.down('Control')
for (const nodeTitle of nodeTitles) {
const nodes = await this.getNodeRefsByTitle(nodeTitle)
for (const node of nodes) {
await node.click('title')
}
}
await this.page.keyboard.up('Control')
await this.nextFrame()
}
async select2Nodes() {
// Select 2 CLIP nodes.
await this.page.keyboard.down('Control')
@@ -835,12 +848,24 @@ export class ComfyPage {
(
await this.page.evaluate((type) => {
return window['app'].graph.nodes
.filter((n) => n.type === type)
.map((n) => n.id)
.filter((n: LGraphNode) => n.type === type)
.map((n: LGraphNode) => n.id)
}, type)
).map((id: NodeId) => this.getNodeRefById(id))
)
}
async getNodeRefsByTitle(title: string): Promise<NodeReference[]> {
return Promise.all(
(
await this.page.evaluate((title) => {
return window['app'].graph.nodes
.filter((n: LGraphNode) => n.title === title)
.map((n: LGraphNode) => n.id)
}, title)
).map((id: NodeId) => this.getNodeRefById(id))
)
}
async getFirstNodeRef(): Promise<NodeReference | null> {
const id = await this.page.evaluate(() => {
return window['app'].graph.nodes[0]?.id
@@ -885,10 +910,10 @@ export class ComfyPage {
}
export const comfyPageFixture = base.extend<{ comfyPage: ComfyPage }>({
comfyPage: async ({ page, request }, use) => {
comfyPage: async ({ page, request }, use, testInfo) => {
const comfyPage = new ComfyPage(page, request)
const { parallelIndex } = comfyPageFixture.info()
const { parallelIndex } = testInfo
const username = `playwright-test-${parallelIndex}`
const userId = await comfyPage.setupUser(username)
comfyPage.userIds[parallelIndex] = userId
@@ -896,9 +921,10 @@ export const comfyPageFixture = base.extend<{ comfyPage: ComfyPage }>({
try {
await comfyPage.setupSettings({
'Comfy.UseNewMenu': 'Disabled',
// Hide canvas menu/info by default.
// Hide canvas menu/info/selection toolbox by default.
'Comfy.Graph.CanvasInfo': false,
'Comfy.Graph.CanvasMenu': false,
'Comfy.Canvas.SelectionToolbox': false,
// Hide all badges by default.
'Comfy.NodeBadge.NodeIdBadgeMode': NodeBadgeMode.None,
'Comfy.NodeBadge.NodeSourceBadgeMode': NodeBadgeMode.None,

View File

@@ -3,6 +3,10 @@ import { Page } from '@playwright/test'
export class SettingDialog {
constructor(public readonly page: Page) {}
get root() {
return this.page.locator('div.settings-container')
}
async open() {
const button = this.page.locator('button.comfy-settings-btn:visible')
await button.click()

View File

@@ -62,6 +62,9 @@ export class NodeWidgetReference {
readonly node: NodeReference
) {}
/**
* @returns The position of the widget's center
*/
async getPosition(): Promise<Position> {
const pos: [number, number] = await this.node.comfyPage.page.evaluate(
([id, index]) => {
@@ -89,6 +92,22 @@ export class NodeWidgetReference {
position: await this.getPosition()
})
}
async dragHorizontal(delta: number) {
const pos = await this.getPosition()
const canvas = this.node.comfyPage.canvas
const canvasPos = (await canvas.boundingBox())!
this.node.comfyPage.dragAndDrop(
{
x: canvasPos.x + pos.x,
y: canvasPos.y + pos.y
},
{
x: canvasPos.x + pos.x + delta,
y: canvasPos.y + pos.y
}
)
}
}
export class NodeReference {

Binary file not shown.

Before

Width:  |  Height:  |  Size: 88 KiB

After

Width:  |  Height:  |  Size: 87 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 85 KiB

After

Width:  |  Height:  |  Size: 85 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 102 KiB

After

Width:  |  Height:  |  Size: 102 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 100 KiB

After

Width:  |  Height:  |  Size: 100 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 104 KiB

After

Width:  |  Height:  |  Size: 105 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 101 KiB

After

Width:  |  Height:  |  Size: 102 KiB

View File

@@ -35,4 +35,20 @@ test.describe('Keybindings', () => {
true
)
})
test('Should not trigger keybinding reserved by text input when typing in input fields', async ({
comfyPage
}) => {
await comfyPage.registerKeybinding({ key: 'Ctrl+v' }, () => {
window['TestCommand'] = true
})
const textBox = comfyPage.widgetTextBox
await textBox.click()
await textBox.press('Control+v')
await expect(textBox).toBeFocused()
expect(await comfyPage.page.evaluate(() => window['TestCommand'])).toBe(
undefined
)
})
})

View File

@@ -53,4 +53,11 @@ test.describe('Optional input', () => {
await comfyPage.loadWorkflow('simple_slider')
await expect(comfyPage.canvas).toHaveScreenshot('simple_slider.png')
})
test('unknown converted widget', async ({ comfyPage }) => {
await comfyPage.setSetting('Comfy.Workflow.ShowMissingNodesWarning', false)
await comfyPage.loadWorkflow('missing_nodes_converted_widget')
await expect(comfyPage.canvas).toHaveScreenshot(
'missing_nodes_converted_widget.png'
)
})
})

Binary file not shown.

Before

Width:  |  Height:  |  Size: 43 KiB

After

Width:  |  Height:  |  Size: 43 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 43 KiB

After

Width:  |  Height:  |  Size: 43 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 47 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 47 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 65 KiB

After

Width:  |  Height:  |  Size: 66 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 63 KiB

After

Width:  |  Height:  |  Size: 65 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 62 KiB

After

Width:  |  Height:  |  Size: 63 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 61 KiB

After

Width:  |  Height:  |  Size: 62 KiB

View File

@@ -30,9 +30,24 @@ test.describe('Remote COMBO Widget', () => {
}, nodeName)
}
const getWidgetValue = async (comfyPage: ComfyPage, nodeName: string) => {
return await comfyPage.page.evaluate((name) => {
const node = window['app'].graph.nodes.find((node) => node.title === name)
return node.widgets[0].value
}, nodeName)
}
const clickRefreshButton = (comfyPage: ComfyPage, nodeName: string) => {
return comfyPage.page.evaluate((name) => {
const node = window['app'].graph.nodes.find((node) => node.title === name)
const buttonWidget = node.widgets.find((w) => w.name === 'refresh')
return buttonWidget?.callback()
}, nodeName)
}
const waitForWidgetUpdate = async (comfyPage: ComfyPage) => {
// Force re-render to trigger first access of widget's options
await comfyPage.page.mouse.click(100, 100)
await comfyPage.page.mouse.click(400, 300)
await comfyPage.page.waitForTimeout(256)
}
@@ -161,6 +176,23 @@ test.describe('Remote COMBO Widget', () => {
})
test.describe('Refresh Behavior', () => {
test('refresh button is visible in selection toolbar when node is selected', async ({
comfyPage
}) => {
await comfyPage.setSetting('Comfy.Canvas.SelectionToolbox', true)
const nodeName = 'Remote Widget Node'
await addRemoteWidgetNode(comfyPage, nodeName)
await waitForWidgetUpdate(comfyPage)
// Select remote widget node
await comfyPage.page.keyboard.press('Control+A')
await expect(
comfyPage.page.locator('.selection-toolbox .pi-refresh')
).toBeVisible()
})
test('refreshes options when TTL expires', async ({ comfyPage }) => {
// Fulfill each request with a unique timestamp
await comfyPage.page.route(
@@ -179,7 +211,7 @@ test.describe('Remote COMBO Widget', () => {
const initialOptions = await getWidgetOptions(comfyPage, nodeName)
// Wait for the refresh (TTL) to expire
await comfyPage.page.waitForTimeout(302)
await comfyPage.page.waitForTimeout(512)
await comfyPage.page.mouse.click(100, 100)
const refreshedOptions = await getWidgetOptions(comfyPage, nodeName)
@@ -221,14 +253,79 @@ test.describe('Remote COMBO Widget', () => {
const nodeName = 'Remote Widget Node'
await addRemoteWidgetNode(comfyPage, nodeName)
await waitForWidgetUpdate(comfyPage)
// Wait for a few retries
await comfyPage.page.waitForTimeout(1024)
// Wait for timeout and backoff, then force re-render, repeat
const requestTimeout = 512
await comfyPage.page.waitForTimeout(requestTimeout)
await waitForWidgetUpdate(comfyPage)
await comfyPage.page.waitForTimeout(requestTimeout * 2)
await waitForWidgetUpdate(comfyPage)
await comfyPage.page.waitForTimeout(requestTimeout * 3)
// Verify exponential backoff between retries
const intervals = timestamps.slice(1).map((t, i) => t - timestamps[i])
expect(intervals[1]).toBeGreaterThan(intervals[0])
})
test('clicking refresh button forces a refresh', async ({ comfyPage }) => {
await comfyPage.page.route(
'**/api/models/checkpoints**',
async (route) => {
await route.fulfill({
body: JSON.stringify([`${Date.now()}`]),
status: 200
})
}
)
const nodeName = 'Remote Widget Node With Refresh Button'
// Trigger initial fetch when adding node to the graph
await addRemoteWidgetNode(comfyPage, nodeName)
await waitForWidgetUpdate(comfyPage)
const initialOptions = await getWidgetOptions(comfyPage, nodeName)
// Click refresh button
await clickRefreshButton(comfyPage, nodeName)
// Verify refresh occurred
const refreshedOptions = await getWidgetOptions(comfyPage, nodeName)
expect(refreshedOptions).not.toEqual(initialOptions)
})
test('control_after_refresh is applied after refresh', async ({
comfyPage
}) => {
const options = [
['first option', 'second option', 'third option'],
['new first option', 'first option', 'second option', 'third option']
]
await comfyPage.page.route(
'**/api/models/checkpoints**',
async (route) => {
const next = options.shift()
await route.fulfill({
body: JSON.stringify(next),
status: 200
})
}
)
const nodeName =
'Remote Widget Node With Refresh Button and Control After Refresh'
// Trigger initial fetch when adding node to the graph
await addRemoteWidgetNode(comfyPage, nodeName)
await waitForWidgetUpdate(comfyPage)
// Click refresh button
await clickRefreshButton(comfyPage, nodeName)
// Verify the selected value of the widget is the first option in the refreshed list
const refreshedValue = await getWidgetValue(comfyPage, nodeName)
expect(refreshedValue).toEqual('new first option')
})
})
test.describe('Cache Behavior', () => {

Binary file not shown.

Before

Width:  |  Height:  |  Size: 98 KiB

After

Width:  |  Height:  |  Size: 98 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 95 KiB

After

Width:  |  Height:  |  Size: 95 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 95 KiB

After

Width:  |  Height:  |  Size: 95 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 92 KiB

After

Width:  |  Height:  |  Size: 92 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 101 KiB

After

Width:  |  Height:  |  Size: 101 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 98 KiB

After

Width:  |  Height:  |  Size: 98 KiB

View File

@@ -0,0 +1,94 @@
import { expect } from '@playwright/test'
import { comfyPageFixture } from './fixtures/ComfyPage'
const test = comfyPageFixture
test.describe('Selection Toolbox', () => {
test.beforeEach(async ({ comfyPage }) => {
await comfyPage.setSetting('Comfy.Canvas.SelectionToolbox', true)
})
test('shows selection toolbox', async ({ comfyPage }) => {
// By default, selection toolbox should be enabled
expect(
await comfyPage.page.locator('.selection-overlay-container').isVisible()
).toBe(false)
// Select multiple nodes
await comfyPage.selectNodes(['KSampler', 'CLIP Text Encode (Prompt)'])
// Selection toolbox should be visible with multiple nodes selected
await expect(
comfyPage.page.locator('.selection-overlay-container')
).toBeVisible()
await expect(
comfyPage.page.locator('.selection-overlay-container.show-border')
).toBeVisible()
})
test('shows border only with multiple selections', async ({ comfyPage }) => {
// Select single node
await comfyPage.selectNodes(['KSampler'])
// Selection overlay should be visible but without border
await expect(
comfyPage.page.locator('.selection-overlay-container')
).toBeVisible()
await expect(
comfyPage.page.locator('.selection-overlay-container.show-border')
).not.toBeVisible()
// Select multiple nodes
await comfyPage.selectNodes(['KSampler', 'CLIP Text Encode (Prompt)'])
// Selection overlay should show border with multiple selections
await expect(
comfyPage.page.locator('.selection-overlay-container.show-border')
).toBeVisible()
// Deselect to single node
await comfyPage.selectNodes(['CLIP Text Encode (Prompt)'])
// Border should be hidden again
await expect(
comfyPage.page.locator('.selection-overlay-container.show-border')
).not.toBeVisible()
})
test('displays refresh button in toolbox when all nodes are selected', async ({
comfyPage
}) => {
// Select all nodes
await comfyPage.page.focus('canvas')
await comfyPage.page.keyboard.press('Control+A')
await expect(
comfyPage.page.locator('.selection-toolbox .pi-refresh')
).toBeVisible()
})
test('displays bypass button in toolbox when nodes are selected', async ({
comfyPage
}) => {
// A group + a KSampler node
await comfyPage.loadWorkflow('single_group')
// Select group + node should show bypass button
await comfyPage.page.focus('canvas')
await comfyPage.page.keyboard.press('Control+A')
await expect(
comfyPage.page.locator(
'.selection-toolbox *[data-testid="bypass-button"]'
)
).toBeVisible()
// Deselect node (Only group is selected) should hide bypass button
await comfyPage.selectNodes(['KSampler'])
await expect(
comfyPage.page.locator(
'.selection-toolbox *[data-testid="bypass-button"]'
)
).not.toBeVisible()
})
})

View File

@@ -40,3 +40,47 @@ test.describe('Boolean widget', () => {
)
})
})
test.describe('Slider widget', () => {
test('Can drag adjust value', async ({ comfyPage }) => {
await comfyPage.loadWorkflow('simple_slider')
await comfyPage.page.waitForTimeout(300)
const node = (await comfyPage.getFirstNodeRef())!
const widget = await node.getWidget(0)
await comfyPage.page.evaluate(() => {
const widget = window['app'].graph.nodes[0].widgets[0]
widget.callback = (value: number) => {
window['widgetValue'] = value
}
})
await widget.dragHorizontal(50)
await expect(comfyPage.canvas).toHaveScreenshot('slider_widget_dragged.png')
expect(
await comfyPage.page.evaluate(() => window['widgetValue'])
).toBeDefined()
})
})
test.describe('Number widget', () => {
test('Can drag adjust value', async ({ comfyPage }) => {
await comfyPage.loadWorkflow('widgets/seed_widget')
await comfyPage.page.waitForTimeout(300)
const node = (await comfyPage.getFirstNodeRef())!
const widget = await node.getWidget(0)
await comfyPage.page.evaluate(() => {
const widget = window['app'].graph.nodes[0].widgets[0]
widget.callback = (value: number) => {
window['widgetValue'] = value
}
})
await widget.dragHorizontal(50)
await expect(comfyPage.canvas).toHaveScreenshot('seed_widget_dragged.png')
expect(
await comfyPage.page.evaluate(() => window['widgetValue'])
).toBeDefined()
})
})

Binary file not shown.

After

Width:  |  Height:  |  Size: 44 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 44 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 40 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 40 KiB

29
package-lock.json generated
View File

@@ -1,17 +1,17 @@
{
"name": "@comfyorg/comfyui-frontend",
"version": "1.9.6",
"version": "1.10.6",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@comfyorg/comfyui-frontend",
"version": "1.9.6",
"version": "1.10.6",
"license": "GPL-3.0-only",
"dependencies": {
"@atlaskit/pragmatic-drag-and-drop": "^1.3.1",
"@comfyorg/comfyui-electron-types": "^0.4.16",
"@comfyorg/litegraph": "^0.8.70",
"@comfyorg/comfyui-electron-types": "^0.4.20",
"@comfyorg/litegraph": "^0.8.87",
"@primevue/forms": "^4.2.5",
"@primevue/themes": "^4.2.5",
"@sentry/vue": "^8.48.0",
@@ -85,7 +85,7 @@
"typescript-strict-plugin": "^2.4.4",
"unplugin-icons": "^0.19.3",
"unplugin-vue-components": "^0.27.4",
"vite": "^5.4.6",
"vite": "^5.4.14",
"vite-plugin-dts": "^4.3.0",
"vite-plugin-static-copy": "^1.0.5",
"vitest": "^2.1.9",
@@ -1938,15 +1938,15 @@
"dev": true
},
"node_modules/@comfyorg/comfyui-electron-types": {
"version": "0.4.16",
"resolved": "https://registry.npmjs.org/@comfyorg/comfyui-electron-types/-/comfyui-electron-types-0.4.16.tgz",
"integrity": "sha512-AKy4WLVAuDka/Xjv8zrKwfU/wfRSQpFVE5DgxoLfvroCI0sw+rV1JqdL6xFVrYIoeprzbfKhQiyqlAWU+QgHyg==",
"version": "0.4.20",
"resolved": "https://registry.npmjs.org/@comfyorg/comfyui-electron-types/-/comfyui-electron-types-0.4.20.tgz",
"integrity": "sha512-JFKGk9wSx7CcYh9MRNo7bqTLJwQzVc+1Xg8V2Ghn9BS3RzpmkfktaWHi+waU7/CRQMzvjF+mnDPP58xk1xbVhA==",
"license": "GPL-3.0-only"
},
"node_modules/@comfyorg/litegraph": {
"version": "0.8.70",
"resolved": "https://registry.npmjs.org/@comfyorg/litegraph/-/litegraph-0.8.70.tgz",
"integrity": "sha512-YZSMVzr/gUn7Xoe4orFjypq58faDy1kMvF1/kGTzmukF6w7WZ3tPjkng1ZBAWIztjcGDSmeTLYRayj5hfaDavA==",
"version": "0.8.87",
"resolved": "https://registry.npmjs.org/@comfyorg/litegraph/-/litegraph-0.8.87.tgz",
"integrity": "sha512-hEBe8Cc8C3PkWLfUxxhuO7zitYYCq3dO9mX8DfoK6On8EBE+1UijugVKfTWHuB/Yii4rN8yck/CI9yOYvCuD7Q==",
"license": "MIT"
},
"node_modules/@cspotcode/source-map-support": {
@@ -18509,10 +18509,11 @@
"license": "MIT"
},
"node_modules/vite": {
"version": "5.4.6",
"resolved": "https://registry.npmjs.org/vite/-/vite-5.4.6.tgz",
"integrity": "sha512-IeL5f8OO5nylsgzd9tq4qD2QqI0k2CQLGrWD0rCN0EQJZpBK5vJAx0I+GDkMOXxQX/OfFHMuLIx6ddAxGX/k+Q==",
"version": "5.4.14",
"resolved": "https://registry.npmjs.org/vite/-/vite-5.4.14.tgz",
"integrity": "sha512-EK5cY7Q1D8JNhSaPKVK4pwBFvaTmZxEnoKXLG/U9gmdDcihQGNzFlgIvaxezFR4glP1LsuiedwMBqCXH3wZccA==",
"dev": true,
"license": "MIT",
"dependencies": {
"esbuild": "^0.21.3",
"postcss": "^8.4.43",

View File

@@ -1,7 +1,7 @@
{
"name": "@comfyorg/comfyui-frontend",
"private": true,
"version": "1.9.6",
"version": "1.10.6",
"type": "module",
"repository": "https://github.com/Comfy-Org/ComfyUI_frontend",
"homepage": "https://comfy.org",
@@ -73,7 +73,7 @@
"typescript-strict-plugin": "^2.4.4",
"unplugin-icons": "^0.19.3",
"unplugin-vue-components": "^0.27.4",
"vite": "^5.4.6",
"vite": "^5.4.14",
"vite-plugin-dts": "^4.3.0",
"vite-plugin-static-copy": "^1.0.5",
"vitest": "^2.1.9",
@@ -83,8 +83,8 @@
},
"dependencies": {
"@atlaskit/pragmatic-drag-and-drop": "^1.3.1",
"@comfyorg/comfyui-electron-types": "^0.4.16",
"@comfyorg/litegraph": "^0.8.70",
"@comfyorg/comfyui-electron-types": "^0.4.20",
"@comfyorg/litegraph": "^0.8.87",
"@primevue/forms": "^4.2.5",
"@primevue/themes": "^4.2.5",
"@sentry/vue": "^8.48.0",

View File

@@ -0,0 +1,673 @@
{
"last_node_id": 40,
"last_link_id": 38,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 14
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "ControlNetApply",
"pos": [340, 267],
"size": [317.4, 98],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 10
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 11
},
{
"name": "image",
"type": "IMAGE",
"link": 12
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [1]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1280, 704, 1]
},
{
"id": 17,
"type": "VAEDecode",
"pos": [2527, 369],
"size": [210, 46],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 18
},
{
"name": "vae",
"type": "VAE",
"link": 20
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [19],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [-8, 607],
"size": [425.28, 180.61],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 35
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 38
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 13
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 16],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1071823866653712,
"randomize",
10,
8,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 35,
"type": "CLIPTextEncode",
"pos": [1310, -72],
"size": [425.28, 180.61],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 33
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [28],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [-223, -93],
"size": [422.85, 164.31],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 36
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo) girl (flat chest:0.9), (fennec ears:1.0)\u00a0 (fox ears:1.0), (messy hair), eyes, standing (school uniform sweater) sky clouds nature national park beautiful winter snow (scenery HDR landscape)\n(sunset)\n"
]
},
{
"id": 12,
"type": "LoadImage",
"pos": [-280, 287],
"size": [365, 314],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [12],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["pose_worship.png", "image"]
},
{
"id": 13,
"type": "VAELoader",
"pos": [1098, 599],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [14, 20],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["kl-f8-anime2.ckpt"]
},
{
"id": 15,
"type": "CLIPTextEncode",
"pos": [1321, -395],
"size": [400, 200],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 32
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [21],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"girl (flat chest:0.9), (fennec ears:0.8)\u00a0 (fox ears:0.8), (messy hair), (highlights), (realistic starry eyes pupil:1.1), standing (school uniform sweater)\nsky clouds nature national park beautiful winter snow scenery HDR landscape\n\n(sunset)\n\n"
]
},
{
"id": 36,
"type": "CheckpointLoaderSimple",
"pos": [570, -206],
"size": [315, 98],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [29],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [34],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["AOM3A3.safetensors"]
},
{
"id": 37,
"type": "CLIPSetLastLayer",
"pos": [933, -183],
"size": [315, 58],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 34,
"slot_index": 0
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [32, 33],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 38,
"type": "CLIPSetLastLayer",
"pos": [-733, 375],
"size": [315, 58],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 37
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [35, 36],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 39,
"type": "CheckpointLoaderSimple",
"pos": [-1100, 302],
"size": [315, 98],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [38],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [37],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 14,
"type": "LatentUpscale",
"pos": [1486, 494],
"size": [315, 130],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 16
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [17],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1920, 1088, "disabled"]
},
{
"id": 18,
"type": "SaveImage",
"pos": [2769, 370],
"size": [357.86, 262.24],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 19
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 16,
"type": "KSampler",
"pos": [2011, 248],
"size": [315, 262],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 29
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 21
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 28
},
{
"name": "latent_image",
"type": "LATENT",
"link": 17
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [18],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
284006177305237,
"randomize",
8,
5,
"dpmpp_sde",
"simple",
0.52
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [353.07, 252.57],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 11,
"type": "ControlNetLoader",
"pos": [-250, 151],
"size": [450.9, 58],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [11],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["control_v11p_sd15_openpose_fp16.safetensors"]
},
{
"id": 40,
"type": "MarkdownNote",
"pos": [-1095, 480],
"size": [225, 60],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#2-pass-pose-worship)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[10, 6, 0, 10, 0, "CONDITIONING"],
[11, 11, 0, 10, 1, "CONTROL_NET"],
[12, 12, 0, 10, 2, "IMAGE"],
[13, 10, 0, 3, 1, "CONDITIONING"],
[14, 13, 0, 8, 1, "VAE"],
[16, 3, 0, 14, 0, "LATENT"],
[17, 14, 0, 16, 3, "LATENT"],
[18, 16, 0, 17, 0, "LATENT"],
[19, 17, 0, 18, 0, "IMAGE"],
[20, 13, 0, 17, 1, "VAE"],
[21, 15, 0, 16, 1, "CONDITIONING"],
[28, 35, 0, 16, 2, "CONDITIONING"],
[29, 36, 0, 16, 0, "MODEL"],
[32, 37, 0, 15, 0, "CLIP"],
[33, 37, 0, 35, 0, "CLIP"],
[34, 36, 1, 37, 0, "CLIP"],
[35, 38, 0, 7, 0, "CLIP"],
[36, 38, 0, 6, 0, "CLIP"],
[37, 39, 1, 38, 0, "CLIP"],
[38, 39, 0, 3, 0, "MODEL"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.88,
"offset": [1252.62, 517.93]
}
},
"version": 0.4,
"models": [
{
"name": "control_v11p_sd15_openpose_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_openpose_fp16.safetensors",
"directory": "controlnet"
}
]
}

View File

@@ -0,0 +1,966 @@
{
"last_node_id": 48,
"last_link_id": 113,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1320, 302],
"size": [210, 46],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 36
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1575, 350],
"size": [210, 250],
"flags": {},
"order": 26,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 49
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [175, 496],
"size": [425.28, 180.61],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 100
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 12,
"type": "ConditioningCombine",
"pos": [834, -246],
"size": [342.6, 46],
"flags": {
"collapsed": false
},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 63
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 57
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [58],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 20,
"type": "VAELoader",
"pos": [1041, 544],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [36, 51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["vae-ft-mse-840000-ema-pruned.safetensors"]
},
{
"id": 35,
"type": "ConditioningCombine",
"pos": [873, -705],
"size": [342.6, 46],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 61
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [63],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [695, 531],
"size": [315, 106],
"flags": {
"collapsed": false
},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [704, 1280, 1]
},
{
"id": 22,
"type": "LatentUpscale",
"pos": [1412, 79],
"size": [315, 130],
"flags": {},
"order": 25,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 41
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1088, 1920, "disabled"]
},
{
"id": 14,
"type": "CLIPTextEncode",
"pos": [-4, -994],
"size": [400, 200],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 104
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [95],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) (night:1.3) (darkness) sky (black) (stars:1.2) (galaxy:1.2) (space) (universe)"
]
},
{
"id": 18,
"type": "ConditioningSetArea",
"pos": [482, -709],
"size": [312, 154],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 96
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [62],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 320, 1]
},
{
"id": 31,
"type": "VAEDecode",
"pos": [2783.3, -41],
"size": [210, 46],
"flags": {},
"order": 28,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 50
},
{
"name": "vae",
"type": "VAE",
"link": 51
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [87],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 32,
"type": "SaveImage",
"pos": [3012.3, -42],
"size": [315, 250],
"flags": {},
"order": 29,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 87
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 24,
"type": "KSampler",
"pos": [2421.3, -389],
"size": [315, 262],
"flags": {},
"order": 27,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 108
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 46
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 47
},
{
"name": "latent_image",
"type": "LATENT",
"link": 42
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [50],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1122440447966177,
"randomize",
14,
7,
"dpmpp_2m",
"simple",
0.5
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [152, 265],
"size": [422.85, 164.31],
"flags": {
"collapsed": false
},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 99
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [93],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(masterpiece) (best quality) beautiful landscape breathtaking amazing view nature photograph forest mountains ocean (sky) national park scenery"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [885, 136],
"size": [315, 262],
"flags": {},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 107
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 54
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 41],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
335608130539327,
"randomize",
13,
8.5,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 11,
"type": "ConditioningSetArea",
"pos": [479, -454],
"size": [314, 154],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 97,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 512, 1]
},
{
"id": 19,
"type": "ConditioningCombine",
"pos": [1180, -151],
"size": [342.6, 46],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 58
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 94
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [54],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 10,
"type": "ConditioningCombine",
"pos": [803, -149],
"size": [342.6, 46],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 40
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 93
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [94],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 15,
"type": "ConditioningSetArea",
"pos": [466, -233],
"size": [299, 154],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 98
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 704, 1]
},
{
"id": 13,
"type": "CLIPTextEncode",
"pos": [-5, -729],
"size": [400, 200],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 103
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [96],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) (evening:1.2) (sky:1.2) (clouds) (colorful) (HDR:1.2) (sunset:1.3)\n"
]
},
{
"id": 17,
"type": "CLIPTextEncode",
"pos": [11, -455],
"size": [400, 200],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 102
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [97],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["(best quality) (daytime:1.2) sky (blue)\n"]
},
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [16, -217],
"size": [400, 200],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 101
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [98],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["(masterpiece) (best quality) morning sky\n\n"]
},
{
"id": 34,
"type": "ConditioningSetArea",
"pos": [476, -932],
"size": [312, 154],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 95,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [61],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 0, 1.2]
},
{
"id": 44,
"type": "CLIPSetLastLayer",
"pos": [-363, 453],
"size": [315, 58],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 106
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [99, 100, 101, 102, 103, 104],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 45,
"type": "CheckpointLoaderSimple",
"pos": [-849, 429],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [107],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [106],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 27,
"type": "CLIPTextEncode",
"pos": [1969.3, -336],
"size": [400, 200],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 113
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [47],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 26,
"type": "CLIPTextEncode",
"pos": [1965, -580],
"size": [400, 200],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 112
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [46],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) beautiful (HDR:1.2) (realistic:1.2) landscape breathtaking amazing view nature scenery photograph forest mountains ocean daytime night evening morning, (sky:1.2)\n"
]
},
{
"id": 47,
"type": "CLIPSetLastLayer",
"pos": [1569, -403],
"size": [315, 58],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 111
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [112, 113],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 46,
"type": "CheckpointLoaderSimple",
"pos": [1217, -496],
"size": [315, 98],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [108],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [111],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["AbyssOrangeMix2_hard.safetensors"]
},
{
"id": 48,
"type": "MarkdownNote",
"pos": [-840, 585],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/area_composition/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[36, 20, 0, 8, 1, "VAE"],
[40, 15, 0, 10, 0, "CONDITIONING"],
[41, 3, 0, 22, 0, "LATENT"],
[42, 22, 0, 24, 3, "LATENT"],
[46, 26, 0, 24, 1, "CONDITIONING"],
[47, 27, 0, 24, 2, "CONDITIONING"],
[49, 8, 0, 9, 0, "IMAGE"],
[50, 24, 0, 31, 0, "LATENT"],
[51, 20, 0, 31, 1, "VAE"],
[54, 19, 0, 3, 1, "CONDITIONING"],
[57, 11, 0, 12, 1, "CONDITIONING"],
[58, 12, 0, 19, 0, "CONDITIONING"],
[61, 34, 0, 35, 0, "CONDITIONING"],
[62, 18, 0, 35, 1, "CONDITIONING"],
[63, 35, 0, 12, 0, "CONDITIONING"],
[87, 31, 0, 32, 0, "IMAGE"],
[93, 6, 0, 10, 1, "CONDITIONING"],
[94, 10, 0, 19, 1, "CONDITIONING"],
[95, 14, 0, 34, 0, "CONDITIONING"],
[96, 13, 0, 18, 0, "CONDITIONING"],
[97, 17, 0, 11, 0, "CONDITIONING"],
[98, 33, 0, 15, 0, "CONDITIONING"],
[99, 44, 0, 6, 0, "CLIP"],
[100, 44, 0, 7, 0, "CLIP"],
[101, 44, 0, 33, 0, "CLIP"],
[102, 44, 0, 17, 0, "CLIP"],
[103, 44, 0, 13, 0, "CLIP"],
[104, 44, 0, 14, 0, "CLIP"],
[106, 45, 1, 44, 0, "CLIP"],
[107, 45, 0, 3, 0, "MODEL"],
[108, 46, 0, 24, 0, "MODEL"],
[111, 46, 1, 47, 0, "CLIP"],
[112, 47, 0, 26, 0, "CLIP"],
[113, 47, 0, 27, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.44,
"offset": [1558.38, 1652.18]
}
},
"version": 0.4,
"models": [
{
"name": "vae-ft-mse-840000-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors?download=true",
"directory": "vae"
}
]
}

View File

@@ -0,0 +1,967 @@
{
"last_node_id": 48,
"last_link_id": 114,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1320, 302],
"size": [210, 46],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 36
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1575, 350],
"size": [210, 58],
"flags": {},
"order": 26,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 49
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [175, 496],
"size": [425.28, 180.61],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 106
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 12,
"type": "ConditioningCombine",
"pos": [834, -246],
"size": [342.6, 46],
"flags": {
"collapsed": false
},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 63
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 57
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [58],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 20,
"type": "VAELoader",
"pos": [1041, 544],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [36, 51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["vae-ft-mse-840000-ema-pruned.safetensors"]
},
{
"id": 35,
"type": "ConditioningCombine",
"pos": [873, -705],
"size": [342.6, 46],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 61
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [63],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [695, 531],
"size": [315, 106],
"flags": {
"collapsed": false
},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [704, 1280, 1]
},
{
"id": 22,
"type": "LatentUpscale",
"pos": [1412, 79],
"size": [315, 130],
"flags": {},
"order": 25,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 41
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1088, 1920, "disabled"]
},
{
"id": 14,
"type": "CLIPTextEncode",
"pos": [-4, -994],
"size": [400, 200],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 110
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [89],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) (night:1.3) (darkness) sky (black) (stars:1.2) (galaxy:1.2) (space) (universe)"
]
},
{
"id": 13,
"type": "CLIPTextEncode",
"pos": [-5, -729],
"size": [400, 200],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 109
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [91],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) (evening:1.2) (sky:1.2) (clouds) (colorful) (HDR:1.2) (sunset:1.3)\n"
]
},
{
"id": 17,
"type": "CLIPTextEncode",
"pos": [11, -455],
"size": [400, 200],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 108
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [90],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["(best quality) (daytime:1.2) sky (blue)\n"]
},
{
"id": 18,
"type": "ConditioningSetArea",
"pos": [482, -709],
"size": [312, 154],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 90
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [62],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 320, 1]
},
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [16, -217],
"size": [400, 200],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 107
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [92],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["(masterpiece) (best quality) morning sky\n\n"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [152, 265],
"size": [422.85, 164.31],
"flags": {
"collapsed": false
},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 105
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [93],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(masterpiece) (best quality) beautiful landscape breathtaking amazing view nature photograph forest mountains ocean (sky) national park scenery"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [885, 136],
"size": [315, 262],
"flags": {},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 104
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 54
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 41],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
823155751257884,
"randomize",
13,
8.5,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 11,
"type": "ConditioningSetArea",
"pos": [479, -454],
"size": [314, 154],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 91,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 512, 1]
},
{
"id": 19,
"type": "ConditioningCombine",
"pos": [1180, -151],
"size": [342.6, 46],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 58
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 94
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [54],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 10,
"type": "ConditioningCombine",
"pos": [803, -149],
"size": [342.6, 46],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 40
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 93
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [94],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 34,
"type": "ConditioningSetArea",
"pos": [476, -932],
"size": [312, 154],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 92,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [61],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 0, 1]
},
{
"id": 15,
"type": "ConditioningSetArea",
"pos": [466, -233],
"size": [299, 154],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 89
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 704, 1.5]
},
{
"id": 44,
"type": "CheckpointLoaderSimple",
"pos": [-703, 444],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [104],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [111],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 46,
"type": "CLIPSetLastLayer",
"pos": [-354, 244],
"size": [315, 58],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 111,
"slot_index": 0
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [105, 106, 107, 108, 109, 110],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 24,
"type": "KSampler",
"pos": [2220, -398],
"size": [315, 262],
"flags": {},
"order": 27,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 95
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 46
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 47
},
{
"name": "latent_image",
"type": "LATENT",
"link": 42
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [50],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
418330692116968,
"randomize",
14,
7,
"dpmpp_2m",
"simple",
0.5
]
},
{
"id": 32,
"type": "SaveImage",
"pos": [2825, -62],
"size": [315, 58],
"flags": {},
"order": 29,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 87
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 31,
"type": "VAEDecode",
"pos": [2590, -61],
"size": [210, 46],
"flags": {},
"order": 28,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 50
},
{
"name": "vae",
"type": "VAE",
"link": 51
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [87],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 26,
"type": "CLIPTextEncode",
"pos": [1781, -571],
"size": [400, 200],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 113
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [46],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) beautiful (HDR:1.2) (realistic:1.2) landscape breathtaking amazing view nature scenery photograph forest mountains ocean daytime night evening morning, (sky:1.2)\n"
]
},
{
"id": 27,
"type": "CLIPTextEncode",
"pos": [1787, -317],
"size": [400, 200],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 114
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [47],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 47,
"type": "CLIPSetLastLayer",
"pos": [1407, -402],
"size": [315, 58],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 112
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [113, 114],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 45,
"type": "CheckpointLoaderSimple",
"pos": [1074, -444],
"size": [315, 98],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [95],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [112],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["AbyssOrangeMix2_hard.safetensors"]
},
{
"id": 48,
"type": "MarkdownNote",
"pos": [-690, 615],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/area_composition/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[36, 20, 0, 8, 1, "VAE"],
[40, 15, 0, 10, 0, "CONDITIONING"],
[41, 3, 0, 22, 0, "LATENT"],
[42, 22, 0, 24, 3, "LATENT"],
[46, 26, 0, 24, 1, "CONDITIONING"],
[47, 27, 0, 24, 2, "CONDITIONING"],
[49, 8, 0, 9, 0, "IMAGE"],
[50, 24, 0, 31, 0, "LATENT"],
[51, 20, 0, 31, 1, "VAE"],
[54, 19, 0, 3, 1, "CONDITIONING"],
[57, 11, 0, 12, 1, "CONDITIONING"],
[58, 12, 0, 19, 0, "CONDITIONING"],
[61, 34, 0, 35, 0, "CONDITIONING"],
[62, 18, 0, 35, 1, "CONDITIONING"],
[63, 35, 0, 12, 0, "CONDITIONING"],
[87, 31, 0, 32, 0, "IMAGE"],
[89, 14, 0, 15, 0, "CONDITIONING"],
[90, 17, 0, 18, 0, "CONDITIONING"],
[91, 13, 0, 11, 0, "CONDITIONING"],
[92, 33, 0, 34, 0, "CONDITIONING"],
[93, 6, 0, 10, 1, "CONDITIONING"],
[94, 10, 0, 19, 1, "CONDITIONING"],
[95, 45, 0, 24, 0, "MODEL"],
[104, 44, 0, 3, 0, "MODEL"],
[105, 46, 0, 6, 0, "CLIP"],
[106, 46, 0, 7, 0, "CLIP"],
[107, 46, 0, 33, 0, "CLIP"],
[108, 46, 0, 17, 0, "CLIP"],
[109, 46, 0, 13, 0, "CLIP"],
[110, 46, 0, 14, 0, "CLIP"],
[111, 44, 1, 46, 0, "CLIP"],
[112, 45, 1, 47, 0, "CLIP"],
[113, 47, 0, 26, 0, "CLIP"],
[114, 47, 0, 27, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.79,
"offset": [1022.96, -230.7]
}
},
"version": 0.4,
"models": [
{
"name": "vae-ft-mse-840000-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors?download=true",
"directory": "vae"
}
]
}

View File

@@ -0,0 +1,620 @@
{
"last_node_id": 50,
"last_link_id": 108,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1320, 302],
"size": [210, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 36
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [175, 496],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 101
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 27,
"type": "CLIPTextEncode",
"pos": [1570, -336],
"size": [400, 200],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 103
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [47],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 22,
"type": "LatentUpscale",
"pos": [1412, 79],
"size": [315, 130],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 41
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1920, 1088, "disabled"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [695, 531],
"size": [315, 106],
"flags": {
"collapsed": false
},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1280, 704, 1]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1556, 303],
"size": [210, 250],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 49
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [156, 269],
"size": [422.85, 164.31],
"flags": {
"collapsed": false
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 102
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [98],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo:1.3) (best quality) (HDR:1.0) colourful, nature wilderness snow mountain peak, (winter:1.2), on landscape mountain in Switzerland alps sunset, aerial view (cityscape:1.3) skyscrapers modern city satellite view, (sunset)\ngirl with fennec ears fox ears, sweater, sitting\n"
]
},
{
"id": 47,
"type": "ConditioningCombine",
"pos": [530, 71],
"size": [342.6, 46],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 97
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 98
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [99],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 45,
"type": "CLIPTextEncode",
"pos": [-88, -224],
"size": [400, 200],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 105,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [93],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo:1.3) (best quality) (HDR:1.0) girl colourful of (flat chest:0.9), (fennec ears:1.0)\u00a0 (fox ears:1.0), blonde twintails medium (messy hair:1.2), (eyes:1.0), sweater, (pink:0.8) , long sleeves, sweatpants (pants), gloves, nature wilderness (sitting:1.3) on snow mountain peak, (:d:0.5) (blush:0.9), (winter:1.2), on landscape mountain in Switzerland alps sunset, comfortable, (spread legs:1.1), aerial view (cityscape:1.3) skyscrapers modern city satellite view, (sunset)"
]
},
{
"id": 31,
"type": "VAEDecode",
"pos": [2419, 10],
"size": [210, 46],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 50
},
{
"name": "vae",
"type": "VAE",
"link": 51
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 46,
"type": "ConditioningSetArea",
"pos": [344, -227],
"size": [317.4, 154],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 93
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [97],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [640, 640, 0, 64, 1]
},
{
"id": 26,
"type": "CLIPTextEncode",
"pos": [1573, -583],
"size": [400, 200],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 104
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [46],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece solo (realistic) (best quality) (HDR:1.0) girl colourful of (flat chest:0.9), (fox ears:0.9), blonde twintails messy hair, (eyes:1.0), sweater, (pink:0.8) , long sleeves, sweatpants pants, gloves, nature wilderness sitting on snow mountain peak aerial view, (:d:0.5) (blush:0.9), (winter:0.9), mountain in Switzerland, comfortable, aerial view (cityscape:1.2) skyscrapers modern city satellite view, (sunset)\n"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [885, 136],
"size": [315, 262],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 106
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 99
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 41],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
830459492315490,
"randomize",
13,
7,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 32,
"type": "SaveImage",
"pos": [2648, -11],
"size": [210, 250],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 100
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 24,
"type": "KSampler",
"pos": [2047, -270],
"size": [315, 262],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 107
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 46
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 47
},
{
"name": "latent_image",
"type": "LATENT",
"link": 42
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [50],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
626842672818096,
"randomize",
7,
5,
"dpmpp_sde",
"simple",
0.52
]
},
{
"id": 20,
"type": "VAELoader",
"pos": [1086, 563],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [36, 51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["vae-ft-mse-840000-ema-pruned.safetensors"]
},
{
"id": 49,
"type": "CLIPSetLastLayer",
"pos": [-227, 630],
"size": [315, 58],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 108
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [101, 102, 103, 104, 105],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 48,
"type": "CheckpointLoaderSimple",
"pos": [-621, 603],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [106, 107],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [108],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 50,
"type": "MarkdownNote",
"pos": [-615, 765],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/area_composition/#increasing-consistency-of-images-with-area-composition)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[36, 20, 0, 8, 1, "VAE"],
[41, 3, 0, 22, 0, "LATENT"],
[42, 22, 0, 24, 3, "LATENT"],
[46, 26, 0, 24, 1, "CONDITIONING"],
[47, 27, 0, 24, 2, "CONDITIONING"],
[49, 8, 0, 9, 0, "IMAGE"],
[50, 24, 0, 31, 0, "LATENT"],
[51, 20, 0, 31, 1, "VAE"],
[93, 45, 0, 46, 0, "CONDITIONING"],
[97, 46, 0, 47, 0, "CONDITIONING"],
[98, 6, 0, 47, 1, "CONDITIONING"],
[99, 47, 0, 3, 1, "CONDITIONING"],
[100, 31, 0, 32, 0, "IMAGE"],
[101, 49, 0, 7, 0, "CLIP"],
[102, 49, 0, 6, 0, "CLIP"],
[103, 49, 0, 27, 0, "CLIP"],
[104, 49, 0, 26, 0, "CLIP"],
[105, 49, 0, 45, 0, "CLIP"],
[106, 48, 0, 3, 0, "MODEL"],
[107, 48, 0, 24, 0, "MODEL"],
[108, 48, 1, 49, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.53,
"offset": [1214.17, 1188.8]
}
},
"version": 0.4,
"models": [
{
"name": "vae-ft-mse-840000-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors?download=true",
"directory": "vae"
}
]
}

View File

@@ -0,0 +1,385 @@
{
"last_node_id": 15,
"last_link_id": 21,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1210, 250],
"size": [210, 46],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 14
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [-42, -147],
"size": [422.85, 164.31],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 21
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo) girl (flat chest:0.9), (fennec ears:1.1)\u00a0 (fox ears:1.1), (blonde hair:1.0), messy hair, sky clouds, standing in a grass field, (chibi), blue eyes"
]
},
{
"id": 12,
"type": "ControlNetLoader",
"pos": [-50, 69],
"size": [422, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["control_v11p_sd15_scribble_fp16.safetensors"]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [355, 213],
"size": [425.28, 180.61],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [16],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis)"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [439, 446],
"size": [315, 106],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 13,
"type": "VAELoader",
"pos": [833, 484],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [14],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["vae-ft-mse-840000-ema-pruned.safetensors"]
},
{
"id": 10,
"type": "ControlNetApply",
"pos": [459, 51],
"size": [317.4, 98],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 10
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 13
},
{
"name": "image",
"type": "IMAGE",
"link": 12
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [18],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [0.9]
},
{
"id": 11,
"type": "LoadImage",
"pos": [-70, 177],
"size": [387.97, 465.51],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [12],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["input_scribble_example.png", "image"]
},
{
"id": 14,
"type": "CheckpointLoaderSimple",
"pos": [-448, 231],
"size": [315, 98],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [19],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [20, 21],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1453, 247],
"size": [393.62, 449.16],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [842, 150],
"size": [315, 262],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 19
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 18
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 16
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1002496614778823,
"randomize",
16,
6,
"uni_pc",
"normal",
1
]
},
{
"id": 15,
"type": "MarkdownNote",
"pos": [-450, 375],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[10, 6, 0, 10, 0, "CONDITIONING"],
[12, 11, 0, 10, 2, "IMAGE"],
[13, 12, 0, 10, 1, "CONTROL_NET"],
[14, 13, 0, 8, 1, "VAE"],
[16, 7, 0, 3, 2, "CONDITIONING"],
[18, 10, 0, 3, 1, "CONDITIONING"],
[19, 14, 0, 3, 0, "MODEL"],
[20, 14, 1, 7, 0, "CLIP"],
[21, 14, 1, 6, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.8,
"offset": [843.77, 555.93]
}
},
"version": 0.4,
"models": [
{
"name": "control_v11p_sd15_scribble_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_scribble_fp16.safetensors?download=true",
"directory": "controlnet"
},
{
"name": "vae-ft-mse-840000-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors?download=true",
"directory": "vae"
}
]
}

View File

@@ -0,0 +1,378 @@
{
"last_node_id": 35,
"last_link_id": 52,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1053, 172],
"size": [210, 46],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 51
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 23,
"type": "ControlNetApply",
"pos": [593.6, -388.0],
"size": [317.4, 98],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 42
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 47
},
{
"name": "image",
"type": "IMAGE",
"link": 34
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [1.0]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [259, 463],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [832, 384, 1]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [169, 212],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 49
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2)"
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1310, 169],
"size": [516.05, 301.24],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 33,
"type": "DiffControlNetLoader",
"pos": [131, -338],
"size": [421.93, 58],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 48
}
],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [47],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DiffControlNetLoader"
},
"widgets_values": ["diff_control_sd15_depth_fp16.safetensors"]
},
{
"id": 24,
"type": "CLIPTextEncode",
"pos": [-305, -435],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 50
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["underwater photograph shark\n\n\n\n"]
},
{
"id": 3,
"type": "KSampler",
"pos": [699, 167],
"size": [315, 262],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 52
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
891858402356003,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 20,
"type": "LoadImage",
"pos": [135, -234],
"size": [429.73, 314],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [34],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["shark_depthmap.png", "image"]
},
{
"id": 34,
"type": "CheckpointLoaderSimple",
"pos": [-281, 110],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [48, 52],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [49, 50],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [51],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 35,
"type": "MarkdownNote",
"pos": [-270, 255],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#t2i-adapter-vs-controlnets)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[34, 20, 0, 23, 2, "IMAGE"],
[40, 23, 0, 3, 1, "CONDITIONING"],
[42, 24, 0, 23, 0, "CONDITIONING"],
[47, 33, 0, 23, 1, "CONTROL_NET"],
[48, 34, 0, 33, 0, "MODEL"],
[49, 34, 1, 7, 0, "CLIP"],
[50, 34, 1, 24, 0, "CLIP"],
[51, 34, 2, 8, 1, "VAE"],
[52, 34, 0, 3, 0, "MODEL"]
],
"groups": [
{
"id": 1,
"title": "Apply Depth ControlNet",
"bounding": [210, -480, 739, 336],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.86,
"offset": [671.97, 711.84]
}
},
"version": 0.4,
"models": [
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "diff_control_sd15_depth_fp16.safetensors",
"url": "https://huggingface.co/kohya-ss/ControlNet-diff-modules/resolve/main/diff_control_sd15_depth_fp16.safetensors?download=true",
"directory": "controlnet"
}
]
}

View File

@@ -0,0 +1,371 @@
{
"last_node_id": 34,
"last_link_id": 49,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1053, 172],
"size": [210, 46],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 49
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [259, 463],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [832, 384, 1]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [169, 212],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 47
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2)"
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1310, 169],
"size": [516.05, 301.24],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [699, 167],
"size": [315, 262],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 46
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
891858402356003,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 23,
"type": "ControlNetApply",
"pos": [553, -289],
"size": [317.4, 98],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 42
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 44
},
{
"name": "image",
"type": "IMAGE",
"link": 34
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [1.0]
},
{
"id": 31,
"type": "ControlNetLoader",
"pos": [168, -286],
"size": [345, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [44],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["t2iadapter_depth_sd14v1.pth"]
},
{
"id": 20,
"type": "LoadImage",
"pos": [88, -174],
"size": [413, 314],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [34],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["shark_depthmap.png", "image"]
},
{
"id": 33,
"type": "CheckpointLoaderSimple",
"pos": [-349, 161],
"size": [315, 98],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [46],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [47, 48],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [49],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 24,
"type": "CLIPTextEncode",
"pos": [-360, -261],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 48
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["underwater photograph shark\n\n\n\n"]
},
{
"id": 34,
"type": "MarkdownNote",
"pos": [-345, 300],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#t2i-adapter-vs-controlnets)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[34, 20, 0, 23, 2, "IMAGE"],
[40, 23, 0, 3, 1, "CONDITIONING"],
[42, 24, 0, 23, 0, "CONDITIONING"],
[44, 31, 0, 23, 1, "CONTROL_NET"],
[46, 33, 0, 3, 0, "MODEL"],
[47, 33, 1, 7, 0, "CLIP"],
[48, 33, 1, 24, 0, "CLIP"],
[49, 33, 2, 8, 1, "VAE"]
],
"groups": [
{
"id": 1,
"title": "Apply Depth T2I-Adapter",
"bounding": [150, -375, 739, 336],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.84,
"offset": [737.68, 680.26]
}
},
"version": 0.4,
"models": [
{
"name": "t2iadapter_depth_sd14v1.pth",
"url": "https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_depth_sd14v1.pth?download=true",
"directory": "controlnet"
},
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,267 @@
{
"last_node_id": 10,
"last_link_id": 9,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"photograph in the style of embedding:SDA768.pt girl with blonde hair\nlandscape scenery view"
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [26, 474],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [1],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v2-1_768-ema-pruned.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [469, 528],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 1
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
193694018275622,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 10,
"type": "MarkdownNote",
"pos": [30, 630],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/textual_inversion_embeddings/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[1, 4, 0, 3, 0, "MODEL"],
[2, 5, 0, 3, 3, "LATENT"],
[3, 4, 1, 6, 0, "CLIP"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[5, 4, 1, 7, 0, "CLIP"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[9, 8, 0, 9, 0, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.84,
"offset": [498.31, 149.5]
}
},
"version": 0.4,
"models": [
{
"name": "v2-1_768-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,329 @@
{
"last_node_id": 15,
"last_link_id": 19,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 18
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 17
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece best quality girl standing in victorian clothing"
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 19
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [15],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1791, 169],
"size": [455.99, 553.09],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 16
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 14,
"type": "ImageUpscaleWithModel",
"pos": [1506, 151],
"size": [241.8, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "upscale_model",
"type": "UPSCALE_MODEL",
"link": 14
},
{
"name": "image",
"type": "IMAGE",
"link": 15
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [16],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageUpscaleWithModel"
},
"widgets_values": []
},
{
"id": 13,
"type": "UpscaleModelLoader",
"pos": [1128, 51],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "UPSCALE_MODEL",
"type": "UPSCALE_MODEL",
"links": [14],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UpscaleModelLoader"
},
"widgets_values": ["RealESRGAN_x4plus.pth"]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 1
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
833543590226030,
"randomize",
20,
8,
"euler",
"normal",
1
]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-11, 307],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [1],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [17, 18],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [19],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 15,
"type": "MarkdownNote",
"pos": [0, 465],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/upscale_models/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[1, 4, 0, 3, 0, "MODEL"],
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[14, 13, 0, 14, 0, "UPSCALE_MODEL"],
[15, 8, 0, 14, 1, "IMAGE"],
[16, 14, 0, 9, 0, "IMAGE"],
[17, 4, 1, 6, 0, "CLIP"],
[18, 4, 1, 7, 0, "CLIP"],
[19, 4, 2, 8, 1, "VAE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.82,
"offset": [400.67, 431.06]
}
},
"version": 0.4,
"models": [
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "RealESRGAN_x4plus.pth",
"url": "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.0/RealESRGAN_x4plus.pth",
"directory": "upscale_models"
}
]
}

View File

@@ -0,0 +1,474 @@
{
"last_node_id": 36,
"last_link_id": 70,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [307, 282],
"size": [425.28, 180.61],
"flags": {
"collapsed": true
},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 63
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [68],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 17,
"type": "LoadImage",
"pos": [220, 530],
"size": [315, 314.0],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [49],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sd3_controlnet_example.png", "image"]
},
{
"id": 19,
"type": "PreviewImage",
"pos": [899, 532],
"size": [571.59, 625.53],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 26
}
],
"outputs": [],
"properties": {
"Node name for S&R": "PreviewImage"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [1290, 40],
"size": [315, 262],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 57
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 64
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 65
},
{
"name": "latent_image",
"type": "LATENT",
"link": 66
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
50363905047731,
"randomize",
20,
1,
"euler",
"normal",
1
]
},
{
"id": 35,
"type": "InstructPixToPixConditioning",
"pos": [1040, 50],
"size": [235.2, 86],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 67
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 68
},
{
"name": "vae",
"type": "VAE",
"link": 69
},
{
"name": "pixels",
"type": "IMAGE",
"link": 70
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [64],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [65],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [66],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "InstructPixToPixConditioning"
},
"widgets_values": []
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1620, 40],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 60
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1850, 40],
"size": [828.95, 893.85],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 32,
"type": "VAELoader",
"pos": [1290, 350],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [60, 69],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [700, 50],
"size": [317.4, 58],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [67],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [30]
},
{
"id": 23,
"type": "CLIPTextEncode",
"pos": [260, 50],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"cute anime girl with massive fluffy fennec ears and a big fluffy tail blonde messy long hair blue eyes wearing a pink sweater and jeans"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 34,
"type": "DualCLIPLoader",
"pos": [-80, 110],
"size": [315, 106],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [62, 63]
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp16.safetensors",
"flux",
"default"
]
},
{
"id": 31,
"type": "UNETLoader",
"pos": [710, -80],
"size": [315, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-canny-dev.safetensors", "default"]
},
{
"id": 18,
"type": "Canny",
"pos": [560, 530],
"size": [315, 82],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 49
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [26, 70],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "Canny"
},
"widgets_values": [0.15, 0.3]
},
{
"id": 36,
"type": "MarkdownNote",
"pos": [-75, 270],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#canny-and-depth)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[26, 18, 0, 19, 0, "IMAGE"],
[41, 23, 0, 26, 0, "CONDITIONING"],
[49, 17, 0, 18, 0, "IMAGE"],
[57, 31, 0, 3, 0, "MODEL"],
[60, 32, 0, 8, 1, "VAE"],
[62, 34, 0, 23, 0, "CLIP"],
[63, 34, 0, 7, 0, "CLIP"],
[64, 35, 0, 3, 1, "CONDITIONING"],
[65, 35, 1, 3, 2, "CONDITIONING"],
[66, 35, 2, 3, 3, "LATENT"],
[67, 26, 0, 35, 0, "CONDITIONING"],
[68, 7, 0, 35, 1, "CONDITIONING"],
[69, 32, 0, 35, 2, "VAE"],
[70, 18, 0, 35, 3, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.67,
"offset": [553.16, 455.34]
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
},
{
"name": "flux1-canny-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Canny-dev/resolve/main/flux1-canny-dev.safetensors?download=true",
"directory": "diffusion_models"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
}
]
}

View File

@@ -0,0 +1,454 @@
{
"last_node_id": 40,
"last_link_id": 76,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1620, 98],
"size": [210, 46],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 60
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [307, 282],
"size": [425.28, 180.61],
"flags": {
"collapsed": true
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 63
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [68],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 34,
"type": "DualCLIPLoader",
"pos": [-238, 112],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [62, 63]
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp16.safetensors",
"flux",
"default"
]
},
{
"id": 17,
"type": "LoadImage",
"pos": [307, 342],
"size": [315, 314.0],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [71],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["shark_depthmap.png", "image"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [621, 8],
"size": [317.4, 58],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [67],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [10]
},
{
"id": 35,
"type": "InstructPixToPixConditioning",
"pos": [1018, 124],
"size": [235.2, 86],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 67
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 68
},
{
"name": "vae",
"type": "VAE",
"link": 69
},
{
"name": "pixels",
"type": "IMAGE",
"link": 71
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [64],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [65],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [73],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "InstructPixToPixConditioning"
},
"widgets_values": []
},
{
"id": 32,
"type": "VAELoader",
"pos": [656, 165],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [60, 69],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1865, 98],
"size": [722.41, 425.77],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 37,
"type": "LoraLoaderModelOnly",
"pos": [624, -172],
"size": [315, 82],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 74
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [76],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LoraLoaderModelOnly"
},
"widgets_values": ["flux1-depth-dev-lora.safetensors", 1]
},
{
"id": 23,
"type": "CLIPTextEncode",
"pos": [115, -17],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["a photograph of a shark in the sea"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 3,
"type": "KSampler",
"pos": [1280, 100],
"size": [315, 262],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 76
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 64
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 65
},
{
"name": "latent_image",
"type": "LATENT",
"link": 73
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
91050358797301,
"randomize",
20,
1,
"euler",
"normal",
1
]
},
{
"id": 31,
"type": "UNETLoader",
"pos": [249, -171],
"size": [315, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [74],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-depth-dev.safetensors", "default"]
},
{
"id": 40,
"type": "MarkdownNote",
"pos": [-225, 270],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#canny-and-depth)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[41, 23, 0, 26, 0, "CONDITIONING"],
[60, 32, 0, 8, 1, "VAE"],
[62, 34, 0, 23, 0, "CLIP"],
[63, 34, 0, 7, 0, "CLIP"],
[64, 35, 0, 3, 1, "CONDITIONING"],
[65, 35, 1, 3, 2, "CONDITIONING"],
[67, 26, 0, 35, 0, "CONDITIONING"],
[68, 7, 0, 35, 1, "CONDITIONING"],
[69, 32, 0, 35, 2, "VAE"],
[71, 17, 0, 35, 3, "IMAGE"],
[73, 35, 2, 3, 3, "LATENT"],
[74, 31, 0, 37, 0, "MODEL"],
[76, 37, 0, 3, 0, "MODEL"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.65,
"offset": [724.57, 776.23]
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
},
{
"name": "flux1-depth-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Depth-dev/resolve/main/flux1-depth-dev.safetensors?download=true",
"directory": "diffusion_models"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "flux1-depth-dev-lora.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Depth-dev-lora/resolve/main/flux1-depth-dev-lora.safetensors?download=true",
"directory": "loras"
}
]
}

View File

@@ -0,0 +1,332 @@
{
"last_node_id": 37,
"last_link_id": 57,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 192],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 45
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [56],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"cute anime girl with massive fluffy fennec ears and a big fluffy tail blonde messy long hair blue eyes wearing a maid outfit with a long black gold leaf pattern dress and a white apron mouth open placing a fancy black forest cake with candles on top of a dinner table of an old dark Victorian mansion lit by candlelight with a bright window to the foggy forest and very expensive stuff everywhere there are paintings on the walls"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1151, 195],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 52
},
{
"name": "vae",
"type": "VAE",
"link": 46
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1375, 194],
"size": [985.3, 1060.38],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [471, 455],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 30,
"type": "CheckpointLoaderSimple",
"pos": [48, 192],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [47],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [45, 54],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [46],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["flux1-dev-fp8.safetensors"]
},
{
"id": 31,
"type": "KSampler",
"pos": [816, 192],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 47
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 57
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 55
},
{
"name": "latent_image",
"type": "LATENT",
"link": 51
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [52],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
972054013131368,
"randomize",
20,
1,
"euler",
"simple",
1
]
},
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [390, 400],
"size": [422.85, 164.31],
"flags": {
"collapsed": true
},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 54,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [55],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 35,
"type": "FluxGuidance",
"pos": [576, 96],
"size": [211.6, 58],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 56
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [3.5]
},
{
"id": 37,
"type": "MarkdownNote",
"pos": [60, 345],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#flux-dev-1)"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 34,
"type": "Note",
"pos": [825, 510],
"size": [282.86, 164.08],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"Note that Flux dev and schnell do not have any negative prompt so CFG should be set to 1.0. Setting CFG to 1.0 means the negative prompt is ignored."
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[9, 8, 0, 9, 0, "IMAGE"],
[45, 30, 1, 6, 0, "CLIP"],
[46, 30, 2, 8, 1, "VAE"],
[47, 30, 0, 31, 0, "MODEL"],
[51, 27, 0, 31, 3, "LATENT"],
[52, 31, 0, 8, 0, "LATENT"],
[54, 30, 1, 33, 0, "CLIP"],
[55, 33, 0, 31, 2, "CONDITIONING"],
[56, 6, 0, 35, 0, "CONDITIONING"],
[57, 35, 0, 31, 1, "CONDITIONING"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.8,
"offset": [350.72, 161.55]
}
},
"version": 0.4,
"models": [
{
"name": "flux1-dev-fp8.safetensors",
"url": "https://huggingface.co/Comfy-Org/flux1-dev/resolve/main/flux1-dev-fp8.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,771 @@
{
"last_node_id": 38,
"last_link_id": 116,
"nodes": [
{
"id": 11,
"type": "DualCLIPLoader",
"pos": [48, 288],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"t5xxl_fp16.safetensors",
"clip_l.safetensors",
"flux",
"default"
]
},
{
"id": 17,
"type": "BasicScheduler",
"pos": [480, 1008],
"size": [315, 106],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 55,
"slot_index": 0
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"shape": 3,
"links": [20]
}
],
"properties": {
"Node name for S&R": "BasicScheduler"
},
"widgets_values": ["simple", 20, 1]
},
{
"id": 16,
"type": "KSamplerSelect",
"pos": [480, 912],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"shape": 3,
"links": [19]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [480, 144],
"size": [317.4, 58],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [3.5],
"color": "#233",
"bgcolor": "#355"
},
{
"id": 22,
"type": "BasicGuider",
"pos": [576, 48],
"size": [222.35, 46],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 54,
"slot_index": 0
},
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 42,
"slot_index": 1
}
],
"outputs": [
{
"name": "GUIDER",
"type": "GUIDER",
"shape": 3,
"links": [30],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "BasicGuider"
},
"widgets_values": []
},
{
"id": 13,
"type": "SamplerCustomAdvanced",
"pos": [864, 192],
"size": [272.36, 124.54],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "noise",
"type": "NOISE",
"link": 37,
"slot_index": 0
},
{
"name": "guider",
"type": "GUIDER",
"link": 30,
"slot_index": 1
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 19,
"slot_index": 2
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 20,
"slot_index": 3
},
{
"name": "latent_image",
"type": "LATENT",
"link": 116,
"slot_index": 4
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"shape": 3,
"links": [24],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustomAdvanced"
},
"widgets_values": []
},
{
"id": 25,
"type": "RandomNoise",
"pos": [480, 768],
"size": [315, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "NOISE",
"type": "NOISE",
"shape": 3,
"links": [37]
}
],
"properties": {
"Node name for S&R": "RandomNoise"
},
"widgets_values": [219670278747233, "randomize"],
"color": "#2a363b",
"bgcolor": "#3f5159"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [866, 367],
"size": [210, 46],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 24
},
{
"name": "vae",
"type": "VAE",
"link": 12
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 240],
"size": [422.85, 164.31],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 10
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"cute anime girl with massive fluffy fennec ears and a big fluffy tail blonde messy long hair blue eyes wearing a maid outfit with a long black gold leaf pattern dress and a white apron mouth open holding a fancy black forest cake with candles on top in the kitchen of an old dark Victorian mansion lit by candlelight with a bright window to the foggy forest and very expensive stuff everywhere"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 30,
"type": "ModelSamplingFlux",
"pos": [480, 1152],
"size": [315, 130],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 56,
"slot_index": 0
},
{
"name": "width",
"type": "INT",
"widget": {
"name": "width"
},
"link": 115,
"slot_index": 1
},
{
"name": "height",
"type": "INT",
"widget": {
"name": "height"
},
"link": 114,
"slot_index": 2
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [54, 55],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ModelSamplingFlux"
},
"widgets_values": [1.15, 0.5, 1024, 1024]
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [480, 624],
"size": [315, 106],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "width",
"type": "INT",
"widget": {
"name": "width"
},
"link": 112
},
{
"name": "height",
"type": "INT",
"widget": {
"name": "height"
},
"link": 113
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [116],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 34,
"type": "PrimitiveNode",
"pos": [432, 480],
"size": [210, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "width"
},
"links": [112, 115],
"slot_index": 0
}
],
"title": "width",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [1024, "fixed"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 35,
"type": "PrimitiveNode",
"pos": [672, 480],
"size": [210, 82],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "height"
},
"links": [113, 114],
"slot_index": 0
}
],
"title": "height",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [1024, "fixed"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 12,
"type": "UNETLoader",
"pos": [48, 144],
"size": [315, 82],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [56],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-dev.safetensors", "default"],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 9,
"type": "SaveImage",
"pos": [1155, 196],
"size": [985.3, 1060.38],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 37,
"type": "Note",
"pos": [480, 1344],
"size": [315.0, 117.98],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"The reference sampling implementation auto adjusts the shift value based on the resolution, if you don't want this you can just bypass (CTRL-B) this ModelSamplingFlux node.\n"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 10,
"type": "VAELoader",
"pos": [48, 432],
"size": [311.82, 60.43],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 28,
"type": "Note",
"pos": [48, 576],
"size": [336, 288],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"If you get an error in any of the nodes above make sure the files are in the correct directories.\n\nSee the top of the examples page for the links : https://comfyanonymous.github.io/ComfyUI_examples/flux/\n\nflux1-dev.safetensors goes in: ComfyUI/models/unet/\n\nt5xxl_fp16.safetensors and clip_l.safetensors go in: ComfyUI/models/clip/\n\nae.safetensors goes in: ComfyUI/models/vae/\n\n\nTip: You can set the weight_dtype above to one of the fp8 types if you have memory issues."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 38,
"type": "MarkdownNote",
"pos": [45, 930],
"size": [225, 60],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#flux-dev-1)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[9, 8, 0, 9, 0, "IMAGE"],
[10, 11, 0, 6, 0, "CLIP"],
[12, 10, 0, 8, 1, "VAE"],
[19, 16, 0, 13, 2, "SAMPLER"],
[20, 17, 0, 13, 3, "SIGMAS"],
[24, 13, 0, 8, 0, "LATENT"],
[30, 22, 0, 13, 1, "GUIDER"],
[37, 25, 0, 13, 0, "NOISE"],
[41, 6, 0, 26, 0, "CONDITIONING"],
[42, 26, 0, 22, 1, "CONDITIONING"],
[54, 30, 0, 22, 0, "MODEL"],
[55, 30, 0, 17, 0, "MODEL"],
[56, 12, 0, 30, 0, "MODEL"],
[112, 34, 0, 27, 0, "INT"],
[113, 35, 0, 27, 1, "INT"],
[114, 35, 0, 30, 2, "INT"],
[115, 34, 0, 30, 1, "INT"],
[116, 27, 0, 13, 4, "LATENT"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [-0.18, 2.29]
},
"groupNodes": {
"EmptyLatentImage": {
"nodes": [
{
"type": "PrimitiveNode",
"pos": [432, 480],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 6,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [],
"widget": {
"name": "height"
},
"slot_index": 0
}
],
"title": "height",
"properties": {
"Run widget replace on values": false
},
"color": "#323",
"bgcolor": "#535",
"index": 0
},
{
"type": "PrimitiveNode",
"pos": [672, 480],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 7,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [],
"slot_index": 0,
"widget": {
"name": "width"
}
}
],
"title": "width",
"properties": {
"Run widget replace on values": false
},
"color": "#323",
"bgcolor": "#535",
"index": 1
},
{
"type": "EmptySD3LatentImage",
"pos": [480, 624],
"size": {
"0": 315,
"1": 106
},
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "width",
"type": "INT",
"link": null,
"widget": {
"name": "width"
}
},
{
"name": "height",
"type": "INT",
"link": null,
"widget": {
"name": "height"
}
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1],
"index": 2
}
],
"links": [
[1, 0, 2, 0, 34, "INT"],
[0, 0, 2, 1, 35, "INT"]
],
"external": [
[0, 0, "INT"],
[1, 0, "INT"],
[2, 0, "LATENT"]
],
"config": {
"0": {
"output": {
"0": {
"name": "height"
}
},
"input": {
"value": {
"visible": true
}
}
},
"1": {
"output": {
"0": {
"name": "width"
}
},
"input": {
"value": {
"visible": true
}
}
},
"2": {
"input": {
"width": {
"visible": false
},
"height": {
"visible": false
}
}
}
}
}
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
},
{
"name": "flux1-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/flux1-dev.safetensors?download=true",
"directory": "diffusion_models"
}
]
}

View File

@@ -0,0 +1,458 @@
{
"last_node_id": 45,
"last_link_id": 100,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [307, 282],
"size": [425.28, 180.61],
"flags": {
"collapsed": true
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 63
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [81],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 32,
"type": "VAELoader",
"pos": [1352, 422],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [60, 82],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [593, 44],
"size": [317.4, 58],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [80],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [30]
},
{
"id": 34,
"type": "DualCLIPLoader",
"pos": [-237, 79],
"size": [315, 106],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [62, 63]
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp16.safetensors",
"flux",
"default"
]
},
{
"id": 39,
"type": "DifferentialDiffusion",
"pos": [1001, -68],
"size": [277.2, 26],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 85
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [86],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DifferentialDiffusion"
},
"widgets_values": []
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1620, 98],
"size": [210, 46],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 60
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [95],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 38,
"type": "InpaintModelConditioning",
"pos": [952, 78],
"size": [302.4, 138],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 80
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 81
},
{
"name": "vae",
"type": "VAE",
"link": 82
},
{
"name": "pixels",
"type": "IMAGE",
"link": 99
},
{
"name": "mask",
"type": "MASK",
"link": 100
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [77],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [78],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [88],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "InpaintModelConditioning"
},
"widgets_values": [false]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1877, 101],
"size": [828.95, 893.85],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 95
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [1280, 100],
"size": [315, 262],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 86
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 77
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 78
},
{
"name": "latent_image",
"type": "LATENT",
"link": 88
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
656821733471329,
"randomize",
20,
1,
"euler",
"normal",
1
]
},
{
"id": 31,
"type": "UNETLoader",
"pos": [602, -120],
"size": [315, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [85],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-fill-dev.safetensors", "default"]
},
{
"id": 17,
"type": "LoadImage",
"pos": [587, 312],
"size": [315, 314.0],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [99],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": [100],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["yosemite_inpaint_example.png", "image"]
},
{
"id": 23,
"type": "CLIPTextEncode",
"pos": [144, -7],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"anime girl with massive fennec ears blonde hair blue eyes wearing a pink shirt"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 45,
"type": "MarkdownNote",
"pos": [-225, 255],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#fill-inpainting-model)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[41, 23, 0, 26, 0, "CONDITIONING"],
[60, 32, 0, 8, 1, "VAE"],
[62, 34, 0, 23, 0, "CLIP"],
[63, 34, 0, 7, 0, "CLIP"],
[77, 38, 0, 3, 1, "CONDITIONING"],
[78, 38, 1, 3, 2, "CONDITIONING"],
[80, 26, 0, 38, 0, "CONDITIONING"],
[81, 7, 0, 38, 1, "CONDITIONING"],
[82, 32, 0, 38, 2, "VAE"],
[85, 31, 0, 39, 0, "MODEL"],
[86, 39, 0, 3, 0, "MODEL"],
[88, 38, 2, 3, 3, "LATENT"],
[95, 8, 0, 9, 0, "IMAGE"],
[99, 17, 0, 38, 3, "IMAGE"],
[100, 17, 1, 38, 4, "MASK"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.21,
"offset": [566.62, 207.73]
}
},
"version": 0.4,
"models": [
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "flux1-fill-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Fill-dev/blob/main/flux1-fill-dev.safetensors",
"directory": "diffusion_models"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
}
]
}

View File

@@ -0,0 +1,491 @@
{
"last_node_id": 45,
"last_link_id": 98,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [307, 282],
"size": [425.28, 180.61],
"flags": {
"collapsed": true
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 63
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [81],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 32,
"type": "VAELoader",
"pos": [1352, 422],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [60, 82],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [593, 44],
"size": [317.4, 58],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [80],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [30]
},
{
"id": 34,
"type": "DualCLIPLoader",
"pos": [-237, 79],
"size": [315, 106],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [62, 63]
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp16.safetensors",
"flux",
"default"
]
},
{
"id": 39,
"type": "DifferentialDiffusion",
"pos": [1001, -68],
"size": [277.2, 26],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 85
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [86],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DifferentialDiffusion"
},
"widgets_values": []
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1620, 98],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 60
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [95],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 38,
"type": "InpaintModelConditioning",
"pos": [952, 78],
"size": [302.4, 138],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 80
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 81
},
{
"name": "vae",
"type": "VAE",
"link": 82
},
{
"name": "pixels",
"type": "IMAGE",
"link": 97
},
{
"name": "mask",
"type": "MASK",
"link": 98
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [77],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [78],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [88],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "InpaintModelConditioning"
},
"widgets_values": [false]
},
{
"id": 44,
"type": "ImagePadForOutpaint",
"pos": [415, 359],
"size": [315, 174],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 96
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [97],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": [98],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "ImagePadForOutpaint"
},
"widgets_values": [400, 0, 400, 400, 24]
},
{
"id": 23,
"type": "CLIPTextEncode",
"pos": [144, -7],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["beautiful scenery"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 9,
"type": "SaveImage",
"pos": [1877, 101],
"size": [828.95, 893.85],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 95
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [1280, 100],
"size": [315, 262],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 86
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 77
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 78
},
{
"name": "latent_image",
"type": "LATENT",
"link": 88
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
164211176398261,
"randomize",
20,
1,
"euler",
"normal",
1
]
},
{
"id": 17,
"type": "LoadImage",
"pos": [23, 376],
"size": [315, 314.0],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [96],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": [],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sd3_controlnet_example.png", "image"]
},
{
"id": 31,
"type": "UNETLoader",
"pos": [602, -120],
"size": [315, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [85],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-fill-dev.safetensors", "default"]
},
{
"id": 45,
"type": "MarkdownNote",
"pos": [-225, 255],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#fill-inpainting-model)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[41, 23, 0, 26, 0, "CONDITIONING"],
[60, 32, 0, 8, 1, "VAE"],
[62, 34, 0, 23, 0, "CLIP"],
[63, 34, 0, 7, 0, "CLIP"],
[77, 38, 0, 3, 1, "CONDITIONING"],
[78, 38, 1, 3, 2, "CONDITIONING"],
[80, 26, 0, 38, 0, "CONDITIONING"],
[81, 7, 0, 38, 1, "CONDITIONING"],
[82, 32, 0, 38, 2, "VAE"],
[85, 31, 0, 39, 0, "MODEL"],
[86, 39, 0, 3, 0, "MODEL"],
[88, 38, 2, 3, 3, "LATENT"],
[95, 8, 0, 9, 0, "IMAGE"],
[96, 17, 0, 44, 0, "IMAGE"],
[97, 44, 0, 38, 3, "IMAGE"],
[98, 44, 1, 38, 4, "MASK"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1,
"offset": [240.64, 211.87]
}
},
"version": 0.4,
"models": [
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "flux1-fill-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Fill-dev/blob/main/flux1-fill-dev.safetensors",
"directory": "diffusion_models"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
}
]
}

View File

@@ -0,0 +1,951 @@
{
"last_node_id": 44,
"last_link_id": 123,
"nodes": [
{
"id": 11,
"type": "DualCLIPLoader",
"pos": [48, 288],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"t5xxl_fp16.safetensors",
"clip_l.safetensors",
"flux",
"default"
]
},
{
"id": 17,
"type": "BasicScheduler",
"pos": [480, 1008],
"size": [315, 106],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 55,
"slot_index": 0
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"shape": 3,
"links": [20]
}
],
"properties": {
"Node name for S&R": "BasicScheduler"
},
"widgets_values": ["simple", 20, 1]
},
{
"id": 16,
"type": "KSamplerSelect",
"pos": [480, 912],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"shape": 3,
"links": [19]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [480, 144],
"size": [317.4, 58],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [122],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [3.5],
"color": "#233",
"bgcolor": "#355"
},
{
"id": 13,
"type": "SamplerCustomAdvanced",
"pos": [864, 192],
"size": [272.36, 124.54],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "noise",
"type": "NOISE",
"link": 37,
"slot_index": 0
},
{
"name": "guider",
"type": "GUIDER",
"link": 30,
"slot_index": 1
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 19,
"slot_index": 2
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 20,
"slot_index": 3
},
{
"name": "latent_image",
"type": "LATENT",
"link": 116,
"slot_index": 4
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"shape": 3,
"links": [24],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustomAdvanced"
},
"widgets_values": []
},
{
"id": 25,
"type": "RandomNoise",
"pos": [480, 768],
"size": [315, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "NOISE",
"type": "NOISE",
"shape": 3,
"links": [37]
}
],
"properties": {
"Node name for S&R": "RandomNoise"
},
"widgets_values": [958831004022715, "randomize"],
"color": "#2a363b",
"bgcolor": "#3f5159"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [866, 367],
"size": [210, 46],
"flags": {},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 24
},
{
"name": "vae",
"type": "VAE",
"link": 12
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 30,
"type": "ModelSamplingFlux",
"pos": [480, 1152],
"size": [315, 130],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 56,
"slot_index": 0
},
{
"name": "width",
"type": "INT",
"widget": {
"name": "width"
},
"link": 115,
"slot_index": 1
},
{
"name": "height",
"type": "INT",
"widget": {
"name": "height"
},
"link": 114,
"slot_index": 2
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [54, 55],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ModelSamplingFlux"
},
"widgets_values": [1.15, 0.5, 1024, 1024]
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [480, 624],
"size": [315, 106],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "width",
"type": "INT",
"widget": {
"name": "width"
},
"link": 112
},
{
"name": "height",
"type": "INT",
"widget": {
"name": "height"
},
"link": 113
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [116],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 34,
"type": "PrimitiveNode",
"pos": [432, 480],
"size": [210, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "width"
},
"links": [112, 115],
"slot_index": 0
}
],
"title": "width",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [1024, "fixed"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 35,
"type": "PrimitiveNode",
"pos": [672, 480],
"size": [210, 82],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "height"
},
"links": [113, 114],
"slot_index": 0
}
],
"title": "height",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [1024, "fixed"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 12,
"type": "UNETLoader",
"pos": [48, 144],
"size": [315, 82],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [56],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-dev.safetensors", "default"],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 9,
"type": "SaveImage",
"pos": [1155, 196],
"size": [985.3, 1060.38],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 37,
"type": "Note",
"pos": [480, 1344],
"size": [315.0, 117.98],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"The reference sampling implementation auto adjusts the shift value based on the resolution, if you don't want this you can just bypass (CTRL-B) this ModelSamplingFlux node.\n"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 10,
"type": "VAELoader",
"pos": [48, 432],
"size": [311.82, 60.43],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 28,
"type": "Note",
"pos": [48, 576],
"size": [336, 288],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"If you get an error in any of the nodes above make sure the files are in the correct directories.\n\nSee the top of the examples page for the links : https://comfyanonymous.github.io/ComfyUI_examples/flux/\n\nflux1-dev.safetensors goes in: ComfyUI/models/diffusion_models/\n\nt5xxl_fp16.safetensors and clip_l.safetensors go in: ComfyUI/models/text_encoders/\n\nae.safetensors goes in: ComfyUI/models/vae/\n\n\nTip: You can set the weight_dtype above to one of the fp8 types if you have memory issues."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 39,
"type": "CLIPVisionEncode",
"pos": [420, -300],
"size": [290, 78],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 117
},
{
"name": "image",
"type": "IMAGE",
"link": 118
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [120],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 40,
"type": "LoadImage",
"pos": [60, -300],
"size": [315, 314],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [118]
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sd3_controlnet_example.png", "image"]
},
{
"id": 42,
"type": "StyleModelLoader",
"pos": [400, -180],
"size": [340, 60],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "STYLE_MODEL",
"type": "STYLE_MODEL",
"links": [119]
}
],
"properties": {
"Node name for S&R": "StyleModelLoader"
},
"widgets_values": ["flux1-redux-dev.safetensors"]
},
{
"id": 38,
"type": "CLIPVisionLoader",
"pos": [60, -410],
"size": [370, 60],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"links": [117],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionLoader"
},
"widgets_values": ["sigclip_vision_patch14_384.safetensors"]
},
{
"id": 41,
"type": "StyleModelApply",
"pos": [760, -300],
"size": [320, 122],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 122
},
{
"name": "style_model",
"type": "STYLE_MODEL",
"link": 119
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"shape": 7,
"link": 120
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [123],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "StyleModelApply"
},
"widgets_values": [1, "multiply"]
},
{
"id": 22,
"type": "BasicGuider",
"pos": [960, 66],
"size": [222.35, 46],
"flags": {},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 54,
"slot_index": 0
},
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 123,
"slot_index": 1
}
],
"outputs": [
{
"name": "GUIDER",
"type": "GUIDER",
"shape": 3,
"links": [30],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "BasicGuider"
},
"widgets_values": []
},
{
"id": 43,
"type": "Note",
"pos": [1130, -440],
"size": [345.9, 182.31],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"The redux model lets you prompt with images. It can be used with any Flux1 dev or schnell model workflow.\n\nYou can chain multiple \"Apply Style Model\" nodes if you want to mix multiple images together."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 240],
"size": [422.85, 164.31],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 10
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["cute anime girl with massive fluffy fennec ears"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 44,
"type": "MarkdownNote",
"pos": [60, 915],
"size": [225, 60],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#redux)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[9, 8, 0, 9, 0, "IMAGE"],
[10, 11, 0, 6, 0, "CLIP"],
[12, 10, 0, 8, 1, "VAE"],
[19, 16, 0, 13, 2, "SAMPLER"],
[20, 17, 0, 13, 3, "SIGMAS"],
[24, 13, 0, 8, 0, "LATENT"],
[30, 22, 0, 13, 1, "GUIDER"],
[37, 25, 0, 13, 0, "NOISE"],
[41, 6, 0, 26, 0, "CONDITIONING"],
[54, 30, 0, 22, 0, "MODEL"],
[55, 30, 0, 17, 0, "MODEL"],
[56, 12, 0, 30, 0, "MODEL"],
[112, 34, 0, 27, 0, "INT"],
[113, 35, 0, 27, 1, "INT"],
[114, 35, 0, 30, 2, "INT"],
[115, 34, 0, 30, 1, "INT"],
[116, 27, 0, 13, 4, "LATENT"],
[117, 38, 0, 39, 0, "CLIP_VISION"],
[118, 40, 0, 39, 1, "IMAGE"],
[119, 42, 0, 41, 1, "STYLE_MODEL"],
[120, 39, 0, 41, 2, "CLIP_VISION_OUTPUT"],
[122, 26, 0, 41, 0, "CONDITIONING"],
[123, 41, 0, 22, 1, "CONDITIONING"]
],
"groups": [
{
"id": 1,
"title": "Redux Model",
"bounding": [45, -480, 1040, 507.6],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.9,
"offset": [139.8, 57.78]
},
"groupNodes": {
"EmptyLatentImage": {
"nodes": [
{
"type": "PrimitiveNode",
"pos": [432, 480],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 6,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [],
"widget": {
"name": "height"
},
"slot_index": 0
}
],
"title": "height",
"properties": {
"Run widget replace on values": false
},
"color": "#323",
"bgcolor": "#535",
"index": 0
},
{
"type": "PrimitiveNode",
"pos": [672, 480],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 7,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [],
"slot_index": 0,
"widget": {
"name": "width"
}
}
],
"title": "width",
"properties": {
"Run widget replace on values": false
},
"color": "#323",
"bgcolor": "#535",
"index": 1
},
{
"type": "EmptySD3LatentImage",
"pos": [480, 624],
"size": {
"0": 315,
"1": 106
},
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "width",
"type": "INT",
"link": null,
"widget": {
"name": "width"
}
},
{
"name": "height",
"type": "INT",
"link": null,
"widget": {
"name": "height"
}
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1],
"index": 2
}
],
"links": [
[1, 0, 2, 0, 34, "INT"],
[0, 0, 2, 1, 35, "INT"]
],
"external": [
[0, 0, "INT"],
[1, 0, "INT"],
[2, 0, "LATENT"]
],
"config": {
"0": {
"output": {
"0": {
"name": "height"
}
},
"input": {
"value": {
"visible": true
}
}
},
"1": {
"output": {
"0": {
"name": "width"
}
},
"input": {
"value": {
"visible": true
}
}
},
"2": {
"input": {
"width": {
"visible": false
},
"height": {
"visible": false
}
}
}
}
}
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "flux1-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/flux1-dev.safetensors?download=true",
"directory": "diffusion_models"
},
{
"name": "sigclip_vision_patch14_384.safetensors",
"url": "https://huggingface.co/Comfy-Org/sigclip_vision_384/resolve/main/sigclip_vision_patch14_384.safetensors?download=true",
"directory": "clip_vision"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
},
{
"name": "flux1-redux-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Redux-dev/resolve/main/flux1-redux-dev.safetensors?download=true",
"directory": "style_models"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
}
]
}

View File

@@ -1,420 +1,302 @@
{
"last_node_id": 36,
"last_link_id": 58,
"nodes": [
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [
390,
400
],
"size": {
"0": 422.84503173828125,
"1": 164.31304931640625
},
"flags": {
"collapsed": true
},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 54,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
55
],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
""
],
"color": "#322",
"bgcolor": "#533"
"last_node_id": 37,
"last_link_id": 58,
"nodes": [
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [390, 400],
"size": [422.85, 164.31],
"flags": {
"collapsed": true
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [
471,
455
],
"size": {
"0": 315,
"1": 106
},
"flags": {},
"order": 0,
"mode": 0,
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
51
],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [
1024,
1024,
1
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [
1151,
195
],
"size": {
"0": 210,
"1": 46
},
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 52
},
{
"name": "vae",
"type": "VAE",
"link": 46
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
9
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 54,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [55],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
{
"id": 9,
"type": "SaveImage",
"pos": [
1375,
194
],
"size": {
"0": 985.3012084960938,
"1": 1060.3828125
},
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"properties": {},
"widgets_values": [
"ComfyUI"
]
},
{
"id": 31,
"type": "KSampler",
"pos": [
816,
192
],
"size": {
"0": 315,
"1": 262
},
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 47
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 58
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 55
},
{
"name": "latent_image",
"type": "LATENT",
"link": 51
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
52
],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
173805153958730,
"randomize",
4,
1,
"euler",
"simple",
1
]
},
{
"id": 30,
"type": "CheckpointLoaderSimple",
"pos": [
48,
192
],
"size": {
"0": 315,
"1": 98
},
"flags": {},
"order": 1,
"mode": 0,
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
47
],
"shape": 3,
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [
45,
54
],
"shape": 3,
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [
46
],
"shape": 3,
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": [
"flux1-schnell-fp8.safetensors"
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [
384,
192
],
"size": {
"0": 422.84503173828125,
"1": 164.31304931640625
},
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 45
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
58
],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"a bottle with a beautiful rainbow galaxy inside it on top of a wooden table in the middle of a modern kitchen beside a plate of vegetables and mushrooms and a wine glasse that contains a planet earth with a plate with a half eaten apple pie on it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 34,
"type": "Note",
"pos": [
831,
501
],
"size": {
"0": 282.8617858886719,
"1": 164.08004760742188
},
"flags": {},
"order": 2,
"mode": 0,
"properties": {
"text": ""
},
"widgets_values": [
"Note that Flux dev and schnell do not have any negative prompt so CFG should be set to 1.0. Setting CFG to 1.0 means the negative prompt is ignored.\n\nThe schnell model is a distilled model that can generate a good image with only 4 steps."
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[
9,
8,
0,
9,
0,
"IMAGE"
],
[
45,
30,
1,
6,
0,
"CLIP"
],
[
46,
30,
2,
8,
1,
"VAE"
],
[
47,
30,
0,
31,
0,
"MODEL"
],
[
51,
27,
0,
31,
3,
"LATENT"
],
[
52,
31,
0,
8,
0,
"LATENT"
],
[
54,
30,
1,
33,
0,
"CLIP"
],
[
55,
33,
0,
31,
2,
"CONDITIONING"
],
[
58,
6,
0,
31,
1,
"CONDITIONING"
]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [
0.6836674124529055,
1.8290357611967831
]
}
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
"models": [
{
"name": "flux1-schnell-fp8.safetensors",
"url": "https://huggingface.co/Comfy-Org/flux1-schnell/resolve/main/flux1-schnell-fp8.safetensors?download=true",
"directory": "checkpoints"
}
],
"version": 0.4
}
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [471, 455],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1151, 195],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 52
},
{
"name": "vae",
"type": "VAE",
"link": 46
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1375, 194],
"size": [985.3, 1060.38],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 31,
"type": "KSampler",
"pos": [816, 192],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 47
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 58
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 55
},
{
"name": "latent_image",
"type": "LATENT",
"link": 51
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [52],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
173805153958730,
"randomize",
4,
1,
"euler",
"simple",
1
]
},
{
"id": 30,
"type": "CheckpointLoaderSimple",
"pos": [48, 192],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [47],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [45, 54],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [46],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["flux1-schnell-fp8.safetensors"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 192],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 45
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [58],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"a bottle with a beautiful rainbow galaxy inside it on top of a wooden table in the middle of a modern kitchen beside a plate of vegetables and mushrooms and a wine glasse that contains a planet earth with a plate with a half eaten apple pie on it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 34,
"type": "Note",
"pos": [831, 501],
"size": [282.86, 164.08],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"Note that Flux dev and schnell do not have any negative prompt so CFG should be set to 1.0. Setting CFG to 1.0 means the negative prompt is ignored.\n\nThe schnell model is a distilled model that can generate a good image with only 4 steps."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 37,
"type": "MarkdownNote",
"pos": [45, 345],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#flux-schnell-1)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[9, 8, 0, 9, 0, "IMAGE"],
[45, 30, 1, 6, 0, "CLIP"],
[46, 30, 2, 8, 1, "VAE"],
[47, 30, 0, 31, 0, "MODEL"],
[51, 27, 0, 31, 3, "LATENT"],
[52, 31, 0, 8, 0, "LATENT"],
[54, 30, 1, 33, 0, "CLIP"],
[55, 33, 0, 31, 2, "CONDITIONING"],
[58, 6, 0, 31, 1, "CONDITIONING"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [0.68, 1.83]
}
},
"version": 0.4,
"models": [
{
"name": "flux1-schnell-fp8.safetensors",
"url": "https://huggingface.co/Comfy-Org/flux1-schnell/resolve/main/flux1-schnell-fp8.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,376 @@
{
"last_node_id": 28,
"last_link_id": 79,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 1
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 77
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 57
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1023216319780679,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 24,
"type": "CLIPTextEncode",
"pos": [-260, -340],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [69],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"photograph scenery landscape, snow beautiful scenery mountain, glass bottle; purple galaxy bottle; sun"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [300, 230],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1495, 167],
"size": [493.63, 561.54],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["gligen/testing"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [410, 460],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 27,
"type": "GLIGENTextBoxApply",
"pos": [770, -340],
"size": [437.22, 382.68],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "conditioning_to",
"type": "CONDITIONING",
"link": 78
},
{
"name": "clip",
"type": "CLIP",
"link": 74
},
{
"name": "gligen_textbox_model",
"type": "GLIGEN",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [77],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "GLIGENTextBoxApply"
},
"widgets_values": ["sun", 144, 144, 416, 16]
},
{
"id": 21,
"type": "GLIGENTextBoxApply",
"pos": [270, -340],
"size": [437.22, 382.68],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "conditioning_to",
"type": "CONDITIONING",
"link": 69
},
{
"name": "clip",
"type": "CLIP",
"link": 53
},
{
"name": "gligen_textbox_model",
"type": "GLIGEN",
"link": 54
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [65, 78],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "GLIGENTextBoxApply"
},
"widgets_values": ["purple galaxy bottle", 192, 304, 176, 272]
},
{
"id": 10,
"type": "GLIGENLoader",
"pos": [-230, -70],
"size": [390, 60],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "GLIGEN",
"type": "GLIGEN",
"links": [54, 75],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "GLIGENLoader"
},
"widgets_values": ["gligen_sd14_textbox_pruned.safetensors"]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-220, 130],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [1],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [5, 53, 67, 74],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [79],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 28,
"type": "MarkdownNote",
"pos": [-210, 285],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/gligen/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[1, 4, 0, 3, 0, "MODEL"],
[2, 5, 0, 3, 3, "LATENT"],
[5, 4, 1, 7, 0, "CLIP"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[53, 4, 1, 21, 1, "CLIP"],
[54, 10, 0, 21, 2, "GLIGEN"],
[57, 7, 0, 3, 2, "CONDITIONING"],
[67, 4, 1, 24, 0, "CLIP"],
[69, 24, 0, 21, 0, "CONDITIONING"],
[74, 4, 1, 27, 1, "CLIP"],
[75, 10, 0, 27, 2, "GLIGEN"],
[77, 27, 0, 3, 1, "CONDITIONING"],
[78, 21, 0, 27, 0, "CONDITIONING"],
[79, 4, 2, 8, 1, "VAE"]
],
"groups": [
{
"id": 1,
"title": "Base Prompt",
"bounding": [-315, -465, 518, 302],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "GLIGEN (for best results the elements should match some elements in the base prompt)",
"bounding": [255, -465, 980, 529],
"color": "#A88",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.8,
"offset": [433.59, 361.81]
}
},
"version": 0.4,
"models": [
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "gligen_sd14_textbox_pruned.safetensors",
"url": "https://huggingface.co/comfyanonymous/GLIGEN_pruned_safetensors/resolve/main/gligen_sd14_textbox_pruned.safetensors?download=true",
"directory": "gligen"
}
]
}

View File

@@ -0,0 +1,607 @@
{
"last_node_id": 26,
"last_link_id": 35,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1185.5, 412.07],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 30
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 13,
"type": "VAEDecode",
"pos": [3221.22, 232.38],
"size": [210, 46],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 15
},
{
"name": "vae",
"type": "VAE",
"link": 33
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [17],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [81.78, 142.34],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 28
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4, 12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece HDR victorian portrait painting of woman, blonde hair, mountain nature, blue sky\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [84.78, 352.34],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 29
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6, 13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands, text, watermark\n"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [142.78, 571.34],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [552.78, 143.34],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 34
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 20],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
251225068430076,
"randomize",
12,
8,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 21,
"type": "VAEDecode",
"pos": [988.18, 29.56],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 20
},
{
"name": "vae",
"type": "VAE",
"link": 32
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 20,
"type": "VAEEncode",
"pos": [2459.1, 103.02],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 26
},
{
"name": "vae",
"type": "VAE",
"link": 31
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [18],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEEncode"
},
"widgets_values": []
},
{
"id": 22,
"type": "ImageUpscaleWithModel",
"pos": [1631.06, 3.66],
"size": [226.8, 46],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "upscale_model",
"type": "UPSCALE_MODEL",
"link": 24
},
{
"name": "image",
"type": "IMAGE",
"link": 23
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [27],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageUpscaleWithModel"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1446, 411],
"size": [611.26, 628.6],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 24,
"type": "ImageScale",
"pos": [1931, 10],
"size": [315, 130],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 27
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [26],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageScale"
},
"widgets_values": ["bilinear", 1536, 1536, "disabled"]
},
{
"id": 12,
"type": "SaveImage",
"pos": [3463, 230],
"size": [868.01, 936.97],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 17
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 11,
"type": "KSampler",
"pos": [2811.96, 176.22],
"size": [315, 262],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 35,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 12,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 13,
"slot_index": 2
},
{
"name": "latent_image",
"type": "LATENT",
"link": 18,
"slot_index": 3
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [15],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
783745448521451,
"randomize",
14,
8,
"uni_pc_bh2",
"normal",
0.5
]
},
{
"id": 25,
"type": "CheckpointLoaderSimple",
"pos": [-262, 284],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [34, 35],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [28, 29],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [30, 31, 32, 33],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v2-1_768-ema-pruned.safetensors"]
},
{
"id": 23,
"type": "UpscaleModelLoader",
"pos": [1288.06, -39.34],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "UPSCALE_MODEL",
"type": "UPSCALE_MODEL",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UpscaleModelLoader"
},
"widgets_values": ["RealESRGAN_x4plus.pth"]
},
{
"id": 26,
"type": "MarkdownNote",
"pos": [-300, 750],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/#non-latent-upscaling)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[12, 6, 0, 11, 1, "CONDITIONING"],
[13, 7, 0, 11, 2, "CONDITIONING"],
[15, 11, 0, 13, 0, "LATENT"],
[17, 13, 0, 12, 0, "IMAGE"],
[18, 20, 0, 11, 3, "LATENT"],
[20, 3, 0, 21, 0, "LATENT"],
[23, 21, 0, 22, 1, "IMAGE"],
[24, 23, 0, 22, 0, "UPSCALE_MODEL"],
[26, 24, 0, 20, 0, "IMAGE"],
[27, 22, 0, 24, 0, "IMAGE"],
[28, 25, 1, 6, 0, "CLIP"],
[29, 25, 1, 7, 0, "CLIP"],
[30, 25, 2, 8, 1, "VAE"],
[31, 25, 2, 20, 1, "VAE"],
[32, 25, 2, 21, 1, "VAE"],
[33, 25, 2, 13, 1, "VAE"],
[34, 25, 0, 3, 0, "MODEL"],
[35, 25, 0, 11, 0, "MODEL"]
],
"groups": [
{
"id": 1,
"title": "Txt2Img",
"bounding": [-300, 0, 1211, 708],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Save Intermediate Image",
"bounding": [1170, 330, 516, 196],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Second pass",
"bounding": [2775, 90, 379, 429],
"color": "#444",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Save Final Image",
"bounding": [3210, 135, 483, 199],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 5,
"title": "ESRGAN upscale with 4x model",
"bounding": [1260, -120, 578, 184],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 6,
"title": "Decode to Pixel space",
"bounding": [960, -45, 285, 142],
"color": "#A88",
"font_size": 24,
"flags": {}
},
{
"id": 7,
"title": "Encode back to latent space",
"bounding": [2400, 15, 312, 157],
"color": "#A88",
"font_size": 24,
"flags": {}
},
{
"id": 8,
"title": "Downscale image to a more reasonable size",
"bounding": [1845, -75, 483, 245],
"color": "#8AA",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.71,
"offset": [448.42, 482.51]
}
},
"version": 0.4,
"models": [
{
"name": "v2-1_768-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "RealESRGAN_x4plus.pth",
"url": "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.0/RealESRGAN_x4plus.pth",
"directory": "upscale_models"
}
]
}

View File

@@ -0,0 +1,442 @@
{
"last_node_id": 17,
"last_link_id": 23,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1235.72, 577.19],
"size": [210, 46],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 21
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "LatentUpscale",
"pos": [1238, 170],
"size": [315, 130],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 10
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [14]
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1152, 1152, "disabled"]
},
{
"id": 13,
"type": "VAEDecode",
"pos": [1961, 125],
"size": [210, 46],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 15
},
{
"name": "vae",
"type": "VAE",
"link": 22
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [17],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [374, 171],
"size": [422.85, 164.31],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 19
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4, 12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece HDR victorian portrait painting of woman, blonde hair, mountain nature, blue sky\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [377, 381],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6, 13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands, text, watermark\n"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [435, 600],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 11,
"type": "KSampler",
"pos": [1585, 114],
"size": [315, 262],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 23,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 12,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 13,
"slot_index": 2
},
{
"name": "latent_image",
"type": "LATENT",
"link": 14,
"slot_index": 3
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [15],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
469771404043268,
"randomize",
14,
8,
"dpmpp_2m",
"simple",
0.5
]
},
{
"id": 12,
"type": "SaveImage",
"pos": [2203, 123],
"size": [407.54, 468.13],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 17
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [845, 172],
"size": [315, 262],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 18
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
89848141647836,
"randomize",
12,
8,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 16,
"type": "CheckpointLoaderSimple",
"pos": [24, 315],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [18, 23],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [19, 20],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [21, 22],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v2-1_768-ema-pruned.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1495.72, 576.19],
"size": [232.94, 282.43],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 17,
"type": "MarkdownNote",
"pos": [0, 780],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[10, 3, 0, 10, 0, "LATENT"],
[12, 6, 0, 11, 1, "CONDITIONING"],
[13, 7, 0, 11, 2, "CONDITIONING"],
[14, 10, 0, 11, 3, "LATENT"],
[15, 11, 0, 13, 0, "LATENT"],
[17, 13, 0, 12, 0, "IMAGE"],
[18, 16, 0, 3, 0, "MODEL"],
[19, 16, 1, 6, 0, "CLIP"],
[20, 16, 1, 7, 0, "CLIP"],
[21, 16, 2, 8, 1, "VAE"],
[22, 16, 2, 13, 1, "VAE"],
[23, 16, 0, 11, 0, "MODEL"]
],
"groups": [
{
"id": 1,
"title": "Txt2Img",
"bounding": [0, 30, 1211, 708],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Save Intermediate Image",
"bounding": [1230, 495, 516, 196],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Hires Fix",
"bounding": [1230, 30, 710, 464],
"color": "#b58b2a",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Save Final Image",
"bounding": [1950, 30, 483, 199],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.97,
"offset": [419.13, 209.33]
}
},
"version": 0.4,
"models": [
{
"name": "v2-1_768-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,553 @@
{
"last_node_id": 78,
"last_link_id": 215,
"nodes": [
{
"id": 16,
"type": "KSamplerSelect",
"pos": [484, 751],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"shape": 3,
"links": [19]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 17,
"type": "BasicScheduler",
"pos": [478, 860],
"size": [315, 106],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 190,
"slot_index": 0
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"shape": 3,
"links": [20]
}
],
"properties": {
"Node name for S&R": "BasicScheduler"
},
"widgets_values": ["simple", 20, 1]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [520, 100],
"size": [317.4, 58],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 175
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [129],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [6],
"color": "#233",
"bgcolor": "#355"
},
{
"id": 45,
"type": "EmptyHunyuanLatentVideo",
"pos": [475.54, 432.67],
"size": [315, 130],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [180],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyHunyuanLatentVideo"
},
"widgets_values": [848, 480, 73, 1]
},
{
"id": 22,
"type": "BasicGuider",
"pos": [600, 0],
"size": [222.35, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 195,
"slot_index": 0
},
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 129,
"slot_index": 1
}
],
"outputs": [
{
"name": "GUIDER",
"type": "GUIDER",
"shape": 3,
"links": [30],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "BasicGuider"
},
"widgets_values": []
},
{
"id": 67,
"type": "ModelSamplingSD3",
"pos": [360, 0],
"size": [210, 58],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 209
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [195],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ModelSamplingSD3"
},
"widgets_values": [7]
},
{
"id": 10,
"type": "VAELoader",
"pos": [0, 420],
"size": [350, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [206, 211],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["hunyuan_video_vae_bf16.safetensors"]
},
{
"id": 11,
"type": "DualCLIPLoader",
"pos": [0, 270],
"size": [350, 106],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [205],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"llava_llama3_fp8_scaled.safetensors",
"hunyuan_video",
"default"
]
},
{
"id": 73,
"type": "VAEDecodeTiled",
"pos": [1150, 200],
"size": [210, 150],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 210
},
{
"name": "vae",
"type": "VAE",
"link": 211
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [215],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecodeTiled"
},
"widgets_values": [256, 64, 64, 8]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1150, 90],
"size": [210, 46],
"flags": {},
"order": 15,
"mode": 2,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 181
},
{
"name": "vae",
"type": "VAE",
"link": 206
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 74,
"type": "Note",
"pos": [1150, 360],
"size": [210, 170],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"Use the tiled decode node by default because most people will need it.\n\nLower the tile_size and overlap if you run out of memory."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 12,
"type": "UNETLoader",
"pos": [0, 150],
"size": [350, 82],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [190, 209],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["hunyuan_video_t2v_720p_bf16.safetensors", "default"],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 77,
"type": "Note",
"pos": [0, 0],
"size": [350, 110],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"Select a fp8 weight_dtype if you are running out of memory."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 13,
"type": "SamplerCustomAdvanced",
"pos": [860, 200],
"size": [272.36, 124.54],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "noise",
"type": "NOISE",
"link": 37,
"slot_index": 0
},
{
"name": "guider",
"type": "GUIDER",
"link": 30,
"slot_index": 1
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 19,
"slot_index": 2
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 20,
"slot_index": 3
},
{
"name": "latent_image",
"type": "LATENT",
"link": 180,
"slot_index": 4
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"shape": 3,
"links": [181, 210],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustomAdvanced"
},
"widgets_values": []
},
{
"id": 44,
"type": "CLIPTextEncode",
"pos": [420, 200],
"size": [422.85, 164.31],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 205
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [175],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"anime style anime girl with massive fennec ears and one big fluffy tail, she has blonde hair long hair blue eyes wearing a pink sweater and a long blue skirt walking in a beautiful outdoor scenery with snow mountains in the background"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 75,
"type": "SaveAnimatedWEBP",
"pos": [1410, 200],
"size": [315, 366],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 215
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI", 24, false, 80, "default"]
},
{
"id": 25,
"type": "RandomNoise",
"pos": [479, 618],
"size": [315, 82],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "NOISE",
"type": "NOISE",
"shape": 3,
"links": [37]
}
],
"properties": {
"Node name for S&R": "RandomNoise"
},
"widgets_values": [1, "randomize"],
"color": "#2a363b",
"bgcolor": "#3f5159"
},
{
"id": 78,
"type": "MarkdownNote",
"pos": [0, 525],
"size": [225, 60],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/hunyuan_video/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[19, 16, 0, 13, 2, "SAMPLER"],
[20, 17, 0, 13, 3, "SIGMAS"],
[30, 22, 0, 13, 1, "GUIDER"],
[37, 25, 0, 13, 0, "NOISE"],
[129, 26, 0, 22, 1, "CONDITIONING"],
[175, 44, 0, 26, 0, "CONDITIONING"],
[180, 45, 0, 13, 4, "LATENT"],
[181, 13, 0, 8, 0, "LATENT"],
[190, 12, 0, 17, 0, "MODEL"],
[195, 67, 0, 22, 0, "MODEL"],
[205, 11, 0, 44, 0, "CLIP"],
[206, 10, 0, 8, 1, "VAE"],
[209, 12, 0, 67, 0, "MODEL"],
[210, 13, 0, 73, 0, "LATENT"],
[211, 10, 0, 73, 1, "VAE"],
[215, 73, 0, 75, 0, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"groupNodes": {},
"ds": {
"scale": 0.86,
"offset": [315.94, 195.23]
}
},
"version": 0.4,
"models": [
{
"name": "hunyuan_video_vae_bf16.safetensors",
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/vae/hunyuan_video_vae_bf16.safetensors?download=true",
"directory": "vae"
},
{
"name": "llava_llama3_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/text_encoders/llava_llama3_fp8_scaled.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "hunyuan_video_t2v_720p_bf16.safetensors",
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/diffusion_models/hunyuan_video_t2v_720p_bf16.safetensors?download=true",
"directory": "diffusion_models"
}
]
}

View File

@@ -0,0 +1,314 @@
{
"last_node_id": 24,
"last_link_id": 41,
"nodes": [
{
"id": 3,
"type": "KSampler",
"pos": [867.8, 375.7],
"size": [315, 262],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 39
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 17
},
{
"name": "latent_image",
"type": "LATENT",
"link": 18
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
237514639057514,
"randomize",
20,
2.5,
"euler",
"karras",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1207.8, 375.7],
"size": [210, 46],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 26
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "SaveAnimatedWEBP",
"pos": [1459, 376],
"size": [741.67, 564.59],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 10
}
],
"outputs": [],
"properties": {
"Node name for S&R": "SaveAnimatedWEBP"
},
"widgets_values": ["ComfyUI", 10, false, 85, "default"]
},
{
"id": 12,
"type": "SVD_img2vid_Conditioning",
"pos": [487.8, 395.7],
"size": [315, 218],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 24
},
{
"name": "init_image",
"type": "IMAGE",
"link": 41,
"slot_index": 1
},
{
"name": "vae",
"type": "VAE",
"link": 25
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"shape": 3,
"links": [40],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"shape": 3,
"links": [17],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"shape": 3,
"links": [18],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "SVD_img2vid_Conditioning"
},
"widgets_values": [1024, 576, 14, 127, 6, 0]
},
{
"id": 14,
"type": "VideoLinearCFGGuidance",
"pos": [487.8, 265.7],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 23
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [39],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VideoLinearCFGGuidance"
},
"widgets_values": [1]
},
{
"id": 15,
"type": "ImageOnlyCheckpointLoader",
"pos": [55, 267],
"size": [369.6, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [23],
"slot_index": 0
},
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [24],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [25, 26],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "ImageOnlyCheckpointLoader"
},
"widgets_values": ["svd.safetensors"]
},
{
"id": 23,
"type": "LoadImage",
"pos": [106, 441],
"size": [315, 314.0],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [41]
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["mountains.png", "image"]
},
{
"id": 24,
"type": "MarkdownNote",
"pos": [105, 810],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/video/#image-to-video)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[10, 8, 0, 10, 0, "IMAGE"],
[17, 12, 1, 3, 2, "CONDITIONING"],
[18, 12, 2, 3, 3, "LATENT"],
[23, 15, 0, 14, 0, "MODEL"],
[24, 15, 1, 12, 0, "CLIP_VISION"],
[25, 15, 2, 12, 2, "VAE"],
[26, 15, 2, 8, 1, "VAE"],
[39, 14, 0, 3, 0, "MODEL"],
[40, 12, 0, 3, 1, "CONDITIONING"],
[41, 23, 0, 12, 1, "IMAGE"]
],
"groups": [
{
"id": 1,
"title": "Image to Video",
"bounding": [480, 195, 954, 478],
"color": "#8A8",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.96,
"offset": [255.53, 68.37]
}
},
"version": 0.4,
"models": [
{
"name": "svd.safetensors",
"url": "https://huggingface.co/stabilityai/stable-video-diffusion-img2vid/resolve/main/svd.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,360 @@
{
"last_node_id": 31,
"last_link_id": 87,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [432, 158],
"size": [422.85, 164.31],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 81
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"outdoors in the yosemite national park mountains nature\n\n\n\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [434, 371],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 82
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["watermark, text\n"]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1422, 387],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 42
},
{
"name": "vae",
"type": "VAE",
"link": 83
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [22],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [940, 180],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 80
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 72
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
152545289528694,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 29,
"type": "CheckpointLoaderSimple",
"pos": [17, 303],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [80],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [81, 82],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [83, 84],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["512-inpainting-ema.safetensors"]
},
{
"id": 20,
"type": "LoadImage",
"pos": [-107, 726],
"size": [344, 346],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [85],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": [],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["yosemite_outpaint_example.png", "image"]
},
{
"id": 30,
"type": "ImagePadForOutpaint",
"pos": [269, 727],
"size": [315, 174],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 85
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [87],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": [86],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "ImagePadForOutpaint"
},
"widgets_values": [0, 128, 0, 128, 40]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1671, 384],
"size": [360.55, 441.53],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 22
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 26,
"type": "VAEEncodeForInpaint",
"pos": [617, 720],
"size": [226.8, 98],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 87
},
{
"name": "vae",
"type": "VAE",
"link": 84
},
{
"name": "mask",
"type": "MASK",
"link": 86
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [72],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEEncodeForInpaint"
},
"widgets_values": [8]
},
{
"id": 31,
"type": "MarkdownNote",
"pos": [30, 465],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/inpaint/#outpainting)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[22, 8, 0, 9, 0, "IMAGE"],
[42, 3, 0, 8, 0, "LATENT"],
[72, 26, 0, 3, 3, "LATENT"],
[80, 29, 0, 3, 0, "MODEL"],
[81, 29, 1, 6, 0, "CLIP"],
[82, 29, 1, 7, 0, "CLIP"],
[83, 29, 2, 8, 1, "VAE"],
[84, 29, 2, 26, 1, "VAE"],
[85, 20, 0, 30, 0, "IMAGE"],
[86, 30, 1, 26, 2, "MASK"],
[87, 30, 0, 26, 0, "IMAGE"]
],
"groups": [
{
"id": 1,
"title": "Load image and pad for outpainting",
"bounding": [-120, 600, 1038, 509],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.86,
"offset": [491.92, 146.6]
}
},
"version": 0.4,
"models": [
{
"name": "512-inpainting-ema.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-inpainting/resolve/main/512-inpainting-ema.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,323 @@
{
"last_node_id": 30,
"last_link_id": 84,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [432, 158],
"size": [422.85, 164.31],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 81
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"closeup photograph of maine coon (cat:1.2) in the yosemite national park mountains nature"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [434, 371],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 82
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["watermark, text\n"]
},
{
"id": 26,
"type": "VAEEncodeForInpaint",
"pos": [503, 669],
"size": [226.8, 98],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 73
},
{
"name": "vae",
"type": "VAE",
"link": 83
},
{
"name": "mask",
"type": "MASK",
"link": 79
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [72],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEEncodeForInpaint"
},
"widgets_values": [6]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1422, 387],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 42
},
{
"name": "vae",
"type": "VAE",
"link": 84
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [22],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1709, 356],
"size": [210, 250],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 22
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 29,
"type": "CheckpointLoaderSimple",
"pos": [30, 314],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [80],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [81, 82],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [83, 84],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["512-inpainting-ema.safetensors"]
},
{
"id": 20,
"type": "LoadImage",
"pos": [49, 679],
"size": [385, 365],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [73],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": [79],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["yosemite_inpaint_example.png", "image"]
},
{
"id": 3,
"type": "KSampler",
"pos": [940, 180],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 80
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 72
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1040111309094545,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 30,
"type": "MarkdownNote",
"pos": [30, 480],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/inpaint/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[22, 8, 0, 9, 0, "IMAGE"],
[42, 3, 0, 8, 0, "LATENT"],
[72, 26, 0, 3, 3, "LATENT"],
[73, 20, 0, 26, 0, "IMAGE"],
[79, 20, 1, 26, 2, "MASK"],
[80, 29, 0, 3, 0, "MODEL"],
[81, 29, 1, 6, 0, "CLIP"],
[82, 29, 1, 7, 0, "CLIP"],
[83, 29, 2, 26, 1, "VAE"],
[84, 29, 2, 8, 1, "VAE"]
],
"groups": [
{
"id": 1,
"title": "Load image and alpha mask for inpainting",
"bounding": [-15, 600, 786, 442],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.88,
"offset": [832.78, 166.61]
}
},
"version": 0.4,
"models": [
{
"name": "512-inpainting-ema.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-inpainting/resolve/main/512-inpainting-ema.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,528 @@
{
"last_node_id": 33,
"last_link_id": 62,
"nodes": [
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-60, 229],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [54],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8, 31],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["wd-illusion-fp16.safetensors"]
},
{
"id": 13,
"type": "CheckpointLoaderSimple",
"pos": [1296, -571],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [56],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [27],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["cardosAnime_v10.safetensors"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [370, 40],
"size": [510, 220],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"anime happy girl (fennec:1.2) (ears:1.3) blonde long (messy hair:1.1) blue eyes, wearing serafuku jeans (sitting on rock:1.15) (spread legs:1.15) (sneakers:0.95) in lake rural swiss village on the mountain side sky clouds HDR sunset\n(exceptional, best aesthetic, new, newest, best quality, masterpiece, extremely detailed, anime:1.05)\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [370, 300],
"size": [510, 190],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"lowres, bad anatomy, bad hands, (text:1.1), blurry, mutated hands and fingers, mutation, deformed face, ugly, (logo:1.1), cropped, worst quality, jpeg, (jpeg artifacts), deleted, old, oldest, (censored), (bad aesthetic), (mosaic censoring, bar censor, blur censor) earphones"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [560, 540],
"size": [315, 106],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1368, 768, 1]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1280, 140],
"size": [210, 46],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "SaveImage",
"pos": [1540, 140],
"size": [1174.13, 734.16],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 10
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 22,
"type": "CLIPSetLastLayer",
"pos": [1670, -550],
"size": [315, 58],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 27
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [13, 14],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 15,
"type": "CLIPTextEncode",
"pos": [2060, -920],
"size": [662.38, 313.1],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 14
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"from far away anime happy girl (fennec ears:0.95) long (messy hair:1.3) blue eyes, wearing serafuku jeans sitting on rock spread legs (sneakers:0.95) in lake rural swiss village on the mountain side sky clouds HDR sunset\n"
]
},
{
"id": 14,
"type": "CLIPTextEncode",
"pos": [2060, -550],
"size": [660, 300],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 13
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [58],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), (text:1.1), letters, numbers, error, cropped, (jpeg artifacts:1.2), (signature:1.1), (watermark:1.1), username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.1), extra legs, (forehead mark) (penis)"
]
},
{
"id": 11,
"type": "VAEDecode",
"pos": [3240, -750],
"size": [210, 46],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 60
},
{
"name": "vae",
"type": "VAE",
"link": 31
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 12,
"type": "SaveImage",
"pos": [3540, -750],
"size": [1868.09, 1101.47],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 12
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 32,
"type": "KSampler",
"pos": [2830, -750],
"size": [315, 262],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 56
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 57
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 58
},
{
"name": "latent_image",
"type": "LATENT",
"link": 59
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [60],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
417682270866800,
"randomize",
8,
13,
"dpmpp_sde",
"simple",
0.5
]
},
{
"id": 27,
"type": "LatentUpscaleBy",
"pos": [1510, -160],
"size": [325.41, 82],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 62,
"slot_index": 0
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [59],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscaleBy"
},
"widgets_values": ["bislerp", 1.5]
},
{
"id": 3,
"type": "KSampler",
"pos": [920, 140],
"size": [318.5, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 54
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 62],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
758448896326830,
"randomize",
14,
8,
"dpmpp_sde",
"simple",
1
]
},
{
"id": 33,
"type": "MarkdownNote",
"pos": [-45, 375],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/#more-examples)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[3, 4, 1, 6, 0, "CLIP"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[5, 4, 1, 7, 0, "CLIP"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[10, 8, 0, 10, 0, "IMAGE"],
[12, 11, 0, 12, 0, "IMAGE"],
[13, 22, 0, 14, 0, "CLIP"],
[14, 22, 0, 15, 0, "CLIP"],
[27, 13, 1, 22, 0, "CLIP"],
[31, 4, 2, 11, 1, "VAE"],
[54, 4, 0, 3, 0, "MODEL"],
[56, 13, 0, 32, 0, "MODEL"],
[57, 15, 0, 32, 1, "CONDITIONING"],
[58, 14, 0, 32, 2, "CONDITIONING"],
[59, 27, 0, 32, 3, "LATENT"],
[60, 32, 0, 11, 0, "LATENT"],
[62, 3, 0, 27, 0, "LATENT"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.76,
"offset": [1200.17, 444.58]
}
},
"version": 0.4
}

311
public/templates/lora.json Normal file
View File

@@ -0,0 +1,311 @@
{
"last_node_id": 11,
"last_link_id": 14,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 14
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 13
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["masterpiece best quality girl"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 12
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
851616030078638,
"randomize",
20,
8,
"euler",
"normal",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [210, 250],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-461, 288],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [10],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [11],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 10,
"type": "LoraLoader",
"pos": [-25, 144],
"size": [315, 126],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 10
},
{
"name": "clip",
"type": "CLIP",
"link": 11
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [12],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [13, 14],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoraLoader"
},
"widgets_values": ["epiNoiseoffset_v2.safetensors", 1, 1]
},
{
"id": 11,
"type": "MarkdownNote",
"pos": [-450, 435],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/lora/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[9, 8, 0, 9, 0, "IMAGE"],
[10, 4, 0, 10, 0, "MODEL"],
[11, 4, 1, 10, 1, "CLIP"],
[12, 10, 0, 3, 0, "MODEL"],
[13, 10, 1, 6, 0, "CLIP"],
[14, 10, 1, 7, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.06,
"offset": [777.19, 192.48]
}
},
"version": 0.4,
"models": [
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "epiNoiseoffset_v2.safetensors",
"url": "https://civitai.com/api/download/models/16576?type=Model&format=SafeTensor&size=full&fp=fp16",
"directory": "loras"
}
]
}

View File

@@ -0,0 +1,357 @@
{
"last_node_id": 12,
"last_link_id": 18,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 14
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 13
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["masterpiece best quality girl"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 12
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
513173432917412,
"randomize",
20,
8,
"euler",
"normal",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [210, 250],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 10,
"type": "LoraLoader",
"pos": [-27, 160],
"size": [315, 126],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 15
},
{
"name": "clip",
"type": "CLIP",
"link": 16
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [12],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [13, 14],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoraLoader"
},
"widgets_values": ["epiNoiseoffset_v2.safetensors", 1, 1]
},
{
"id": 11,
"type": "LoraLoader",
"pos": [-379, 160],
"size": [315, 126],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 17
},
{
"name": "clip",
"type": "CLIP",
"link": 18
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [15],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [16],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoraLoader"
},
"widgets_values": ["theovercomer8sContrastFix_sd15.safetensors", 1, 1]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-780, 284],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [17],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [18],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 12,
"type": "MarkdownNote",
"pos": [-765, 450],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/lora/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[9, 8, 0, 9, 0, "IMAGE"],
[12, 10, 0, 3, 0, "MODEL"],
[13, 10, 1, 6, 0, "CLIP"],
[14, 10, 1, 7, 0, "CLIP"],
[15, 11, 0, 10, 0, "MODEL"],
[16, 11, 1, 10, 1, "CLIP"],
[17, 4, 0, 11, 0, "MODEL"],
[18, 4, 1, 11, 1, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.76,
"offset": [1200.17, 444.58]
}
},
"version": 0.4,
"models": [
{
"name": "theovercomer8sContrastFix_sd15.safetensors",
"url": "https://civitai.com/api/download/models/10350?type=Model&format=SafeTensor&size=full&fp=fp16",
"directory": "loras"
},
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "epiNoiseoffset_v2.safetensors",
"url": "https://civitai.com/api/download/models/16576?type=Model&format=SafeTensor&size=full&fp=fp16",
"directory": "loras"
}
]
}

View File

@@ -0,0 +1,482 @@
{
"last_node_id": 79,
"last_link_id": 190,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [420, 190],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 74
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [187],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"best quality, 4k, HDR, a tracking shot of a beautiful scene of the sea waves on the beach with a massive explosion in the water"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [420, 390],
"size": [425.28, 180.61],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [188],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"low quality, worst quality, deformed, distorted, disfigured, motion smear, motion artifacts, fused fingers, bad anatomy, weird hand, ugly"
],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1600, 30],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 171
},
{
"name": "vae",
"type": "VAE",
"link": 87
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [106],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 38,
"type": "CLIPLoader",
"pos": [60, 190],
"size": [315, 82],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [74, 75],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPLoader"
},
"widgets_values": ["t5xxl_fp16.safetensors", "ltxv", "default"]
},
{
"id": 41,
"type": "SaveAnimatedWEBP",
"pos": [1830, 30],
"size": [680, 610],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 106
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI", 24, false, 90, "default"]
},
{
"id": 44,
"type": "CheckpointLoaderSimple",
"pos": [520, 30],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [181],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": null
},
{
"name": "VAE",
"type": "VAE",
"links": [87, 189],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["ltx-video-2b-v0.9.safetensors"]
},
{
"id": 69,
"type": "LTXVConditioning",
"pos": [920, 60],
"size": [223.87, 78],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 183
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 184
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [166],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [167],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LTXVConditioning"
},
"widgets_values": [25]
},
{
"id": 71,
"type": "LTXVScheduler",
"pos": [856, 531],
"size": [315, 154],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "latent",
"type": "LATENT",
"shape": 7,
"link": 185
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"links": [182],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LTXVScheduler"
},
"widgets_values": [30, 2.05, 0.95, true, 0.1]
},
{
"id": 72,
"type": "SamplerCustom",
"pos": [1201, 32],
"size": [355.2, 230],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 181
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 166
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 167
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 172
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 182
},
{
"name": "latent_image",
"type": "LATENT",
"link": 186
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"links": [171],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustom"
},
"widgets_values": [true, 501744655390087, "randomize", 3]
},
{
"id": 73,
"type": "KSamplerSelect",
"pos": [860, 420],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"links": [172]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 76,
"type": "Note",
"pos": [40, 350],
"size": [360, 200],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"This model needs long descriptive prompts, if the prompt is too short the quality will suffer greatly."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 77,
"type": "LTXVImgToVideo",
"pos": [863, 181],
"size": [315, 214],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 187
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 188
},
{
"name": "vae",
"type": "VAE",
"link": 189
},
{
"name": "image",
"type": "IMAGE",
"link": 190
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [183],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [184],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [185, 186],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "LTXVImgToVideo"
},
"widgets_values": [768, 512, 97, 1, 0.15]
},
{
"id": 78,
"type": "LoadImage",
"pos": [420, 620],
"size": [385.16, 333.33],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [190]
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["island.jpg", "image"]
},
{
"id": 79,
"type": "MarkdownNote",
"pos": [45, 600],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/ltxv/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[74, 38, 0, 6, 0, "CLIP"],
[75, 38, 0, 7, 0, "CLIP"],
[87, 44, 2, 8, 1, "VAE"],
[106, 8, 0, 41, 0, "IMAGE"],
[166, 69, 0, 72, 1, "CONDITIONING"],
[167, 69, 1, 72, 2, "CONDITIONING"],
[171, 72, 0, 8, 0, "LATENT"],
[172, 73, 0, 72, 3, "SAMPLER"],
[181, 44, 0, 72, 0, "MODEL"],
[182, 71, 0, 72, 4, "SIGMAS"],
[183, 77, 0, 69, 0, "CONDITIONING"],
[184, 77, 1, 69, 1, "CONDITIONING"],
[185, 77, 2, 71, 0, "LATENT"],
[186, 77, 2, 72, 5, "LATENT"],
[187, 6, 0, 77, 0, "CONDITIONING"],
[188, 7, 0, 77, 1, "CONDITIONING"],
[189, 44, 2, 77, 2, "VAE"],
[190, 78, 0, 77, 3, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.23,
"offset": [-35.52, 153.62]
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "ltx-video-2b-v0.9.safetensors",
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,419 @@
{
"last_node_id": 77,
"last_link_id": 182,
"nodes": [
{
"id": 38,
"type": "CLIPLoader",
"pos": [60, 190],
"size": [315, 82],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [74, 75],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPLoader"
},
"widgets_values": ["t5xxl_fp16.safetensors", "ltxv", "default"]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1600, 30],
"size": [210, 46],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 171
},
{
"name": "vae",
"type": "VAE",
"link": 87
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [106],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 69,
"type": "LTXVConditioning",
"pos": [920, 60],
"size": [223.87, 78],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 169
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 170
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [166],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [167],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LTXVConditioning"
},
"widgets_values": [25]
},
{
"id": 72,
"type": "SamplerCustom",
"pos": [1201, 32],
"size": [355.2, 230],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 181
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 166
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 167
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 172
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 182
},
{
"name": "latent_image",
"type": "LATENT",
"link": 175
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"links": [171],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustom"
},
"widgets_values": [true, 497797676867141, "randomize", 3]
},
{
"id": 44,
"type": "CheckpointLoaderSimple",
"pos": [520, 30],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [181],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": null
},
{
"name": "VAE",
"type": "VAE",
"links": [87],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["ltx-video-2b-v0.9.safetensors"]
},
{
"id": 70,
"type": "EmptyLTXVLatentVideo",
"pos": [860, 240],
"size": [315, 130],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [168, 175],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLTXVLatentVideo"
},
"widgets_values": [768, 512, 97, 1]
},
{
"id": 71,
"type": "LTXVScheduler",
"pos": [856, 531],
"size": [315, 154],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "latent",
"type": "LATENT",
"shape": 7,
"link": 168
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"links": [182],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LTXVScheduler"
},
"widgets_values": [30, 2.05, 0.95, true, 0.1]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [420, 190],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 74
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [169],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"A woman with long brown hair and light skin smiles at another woman with long blonde hair. The woman with brown hair wears a black jacket and has a small, barely noticeable mole on her right cheek. The camera angle is a close-up, focused on the woman with brown hair's face. The lighting is warm and natural, likely from the setting sun, casting a soft glow on the scene. The scene appears to be real-life footage."
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [420, 390],
"size": [425.28, 180.61],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [170],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"low quality, worst quality, deformed, distorted, disfigured, motion smear, motion artifacts, fused fingers, bad anatomy, weird hand, ugly"
],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 73,
"type": "KSamplerSelect",
"pos": [860, 420],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"links": [172]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 76,
"type": "Note",
"pos": [40, 350],
"size": [360, 200],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"This model needs long descriptive prompts, if the prompt is too short the quality will suffer greatly."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 41,
"type": "SaveAnimatedWEBP",
"pos": [1830, 30],
"size": [680, 610],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 106
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI", 24, false, 90, "default"]
},
{
"id": 77,
"type": "MarkdownNote",
"pos": [45, 600],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/ltxv/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[74, 38, 0, 6, 0, "CLIP"],
[75, 38, 0, 7, 0, "CLIP"],
[87, 44, 2, 8, 1, "VAE"],
[106, 8, 0, 41, 0, "IMAGE"],
[166, 69, 0, 72, 1, "CONDITIONING"],
[167, 69, 1, 72, 2, "CONDITIONING"],
[168, 70, 0, 71, 0, "LATENT"],
[169, 6, 0, 69, 0, "CONDITIONING"],
[170, 7, 0, 69, 1, "CONDITIONING"],
[171, 72, 0, 8, 0, "LATENT"],
[172, 73, 0, 72, 3, "SAMPLER"],
[175, 70, 0, 72, 5, "LATENT"],
[181, 44, 0, 72, 0, "MODEL"],
[182, 71, 0, 72, 4, "SIGMAS"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.65,
"offset": [1490.32, 926.49]
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "ltx-video-2b-v0.9.safetensors",
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,492 @@
{
"last_node_id": 32,
"last_link_id": 43,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [180, 203],
"size": [425.28, 180.61],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 42
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis)"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [287, 462],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1053, 172],
"size": [210, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 28
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 24,
"type": "CLIPTextEncode",
"pos": [-823, -550],
"size": [422.85, 164.31],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 43
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [37],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo) girl photograph realistic (flat chest:0.9), (fennec ears:1.0) (fox ears:1.0), (messy hair) blonde hair, blue eyes, standing, serafuku sweater, (brick house) (scenery HDR landscape) (sun clouds) sky, mountains,\n\n"
]
},
{
"id": 21,
"type": "LoadImage",
"pos": [-560, -144],
"size": [272.84, 372.22],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [33],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["pose_present.png", "image"]
},
{
"id": 31,
"type": "CheckpointLoaderSimple",
"pos": [-1005, 281],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [41],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [42, 43],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["AOM3A1.safetensors"]
},
{
"id": 15,
"type": "VAELoader",
"pos": [720, 506],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [28],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["kl-f8-anime2.ckpt"]
},
{
"id": 27,
"type": "ControlNetLoader",
"pos": [-641, -245],
"size": [352.55, 58],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [39],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["control_v11p_sd15_openpose_fp16.safetensors"]
},
{
"id": 26,
"type": "ControlNetLoader",
"pos": [156, -339],
"size": [343.32, 58],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [38],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["control_v11p_sd15_scribble_fp16.safetensors"]
},
{
"id": 22,
"type": "ControlNetApply",
"pos": [-204, -240],
"size": [317.4, 98],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 37
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 39
},
{
"name": "image",
"type": "IMAGE",
"link": 33
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [35],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [1]
},
{
"id": 3,
"type": "KSampler",
"pos": [699, 167],
"size": [315, 262],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 41
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
894480165483805,
"randomize",
12,
6,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 23,
"type": "ControlNetApply",
"pos": [550.81, -385.59],
"size": [317.4, 98],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 35
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 38
},
{
"name": "image",
"type": "IMAGE",
"link": 34
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [0.8]
},
{
"id": 20,
"type": "LoadImage",
"pos": [188, -217],
"size": [278.1, 361.87],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [34],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["house_scribble.png", "image"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1310, 169],
"size": [516.95, 567.67],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 32,
"type": "MarkdownNote",
"pos": [-1005, 435],
"size": [225, 60],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#mixing-controlnets)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[28, 15, 0, 8, 1, "VAE"],
[33, 21, 0, 22, 2, "IMAGE"],
[34, 20, 0, 23, 2, "IMAGE"],
[35, 22, 0, 23, 0, "CONDITIONING"],
[37, 24, 0, 22, 0, "CONDITIONING"],
[38, 26, 0, 23, 1, "CONTROL_NET"],
[39, 27, 0, 22, 1, "CONTROL_NET"],
[40, 23, 0, 3, 1, "CONDITIONING"],
[41, 31, 0, 3, 0, "MODEL"],
[42, 31, 1, 7, 0, "CLIP"],
[43, 31, 1, 24, 0, "CLIP"]
],
"groups": [
{
"id": 1,
"title": "Apply Pose ControlNet",
"bounding": [-735, -360, 859, 323],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Apply Scribble ControlNet",
"bounding": [165, -480, 739, 336],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.81,
"offset": [2040.05, 734.44]
}
},
"version": 0.4,
"models": [
{
"name": "control_v11p_sd15_scribble_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_scribble_fp16.safetensors?download=true",
"directory": "controlnet"
},
{
"name": "control_v11p_sd15_openpose_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_openpose_fp16.safetensors?download=true",
"directory": "controlnet"
}
]
}

View File

@@ -0,0 +1,308 @@
{
"last_node_id": 40,
"last_link_id": 79,
"nodes": [
{
"id": 3,
"type": "KSampler",
"pos": [863, 187],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 79
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 46
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 52
},
{
"name": "latent_image",
"type": "LATENT",
"link": 38
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [35],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
704883238463297,
"randomize",
30,
4.5,
"euler",
"simple",
1
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 74
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [46],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"a fox moving quickly in a beautiful winter scenery nature trees sunset tracking camera"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [52],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1210, 190],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 35
},
{
"name": "vae",
"type": "VAE",
"link": 76
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [56],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 21,
"type": "EmptyMochiLatentVideo",
"pos": [520, 620],
"size": [315, 130],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [38],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyMochiLatentVideo"
},
"widgets_values": [848, 480, 37, 1]
},
{
"id": 28,
"type": "SaveAnimatedWEBP",
"pos": [1460, 190],
"size": [847.3, 602.03],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 56
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI", 24, false, 80, "default"]
},
{
"id": 37,
"type": "UNETLoader",
"pos": [420, 40],
"size": [315, 82],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [79],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["mochi_preview_bf16.safetensors", "default"]
},
{
"id": 38,
"type": "CLIPLoader",
"pos": [40, 270],
"size": [315, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [74, 75],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPLoader"
},
"widgets_values": ["t5xxl_fp16.safetensors", "mochi", "default"]
},
{
"id": 39,
"type": "VAELoader",
"pos": [890, 500],
"size": [278.68, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [76]
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["mochi_vae.safetensors"]
},
{
"id": 40,
"type": "MarkdownNote",
"pos": [45, 405],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/mochi/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[35, 3, 0, 8, 0, "LATENT"],
[38, 21, 0, 3, 3, "LATENT"],
[46, 6, 0, 3, 1, "CONDITIONING"],
[52, 7, 0, 3, 2, "CONDITIONING"],
[56, 8, 0, 28, 0, "IMAGE"],
[74, 38, 0, 6, 0, "CLIP"],
[75, 38, 0, 7, 0, "CLIP"],
[76, 39, 0, 8, 1, "VAE"],
[79, 37, 0, 3, 0, "MODEL"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [35.42, 115.48]
}
},
"version": 0.4,
"models": [
{
"name": "mochi_vae.safetensors",
"url": "https://huggingface.co/Comfy-Org/mochi_preview_repackaged/resolve/main/split_files/vae/mochi_vae.safetensors?download=true",
"directory": "vae"
},
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "mochi_preview_bf16.safetensors",
"url": "https://huggingface.co/Comfy-Org/mochi_preview_repackaged/resolve/main/split_files/diffusion_models/mochi_preview_bf16.safetensors?download=true",
"directory": "diffusion_models"
}
]
}

View File

@@ -0,0 +1,470 @@
{
"last_node_id": 52,
"last_link_id": 105,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1152, 48],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 63
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 33,
"type": "EmptySD3LatentImage",
"pos": [576, 336],
"size": [210, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [66],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 48,
"type": "ImageScale",
"pos": [-320, 448],
"size": [315, 130],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 91
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [92],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageScale"
},
"widgets_values": ["bilinear", 1024, 1024, "center"]
},
{
"id": 49,
"type": "PreviewImage",
"pos": [384, 512],
"size": [443.1, 520.83],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 93
}
],
"outputs": [],
"properties": {
"Node name for S&R": "PreviewImage"
},
"widgets_values": []
},
{
"id": 50,
"type": "ConditioningZeroOut",
"pos": [203, 133],
"size": [317.4, 26],
"flags": {
"collapsed": true
},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 98
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [102],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningZeroOut"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [816, 48],
"size": [284.12, 262],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 14
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 103,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 104
},
{
"name": "latent_image",
"type": "LATENT",
"link": 66
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [63],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
790192293768778,
"randomize",
32,
4.5,
"euler",
"simple",
1
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1392, 48],
"size": [882.45, 927.85],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 13
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 45,
"type": "LoadImage",
"pos": [-666, 447],
"size": [288, 336],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [91]
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sd3_controlnet_example.png", "image"]
},
{
"id": 47,
"type": "Canny",
"pos": [20, 449],
"size": [315, 82],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 92
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [93, 99],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "Canny"
},
"widgets_values": [0.4, 0.8]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [0, -128],
"size": [320, 192],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 65
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [98, 101],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"happy cute anime fox girl with massive fluffy fennec ears and blonde fluffy hair long hair blue eyes wearing a red scarf a pink sweater and blue jeans\n\nstanding in a beautiful forest with mountains\n\n"
]
},
{
"id": 51,
"type": "ControlNetApplyAdvanced",
"pos": [470, 60],
"size": [315, 186],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 101
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 102
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 100
},
{
"name": "image",
"type": "IMAGE",
"link": 99
},
{
"name": "vae",
"type": "VAE",
"shape": 7,
"link": 105
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [103],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [104],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "ControlNetApplyAdvanced"
},
"widgets_values": [0.66, 0, 1]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-576, 64],
"size": [499.99, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [14],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [65],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8, 105],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd3.5_large_fp8_scaled.safetensors"]
},
{
"id": 46,
"type": "ControlNetLoader",
"pos": [-128, 320],
"size": [460.34, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"shape": 3,
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["sd3.5_large_controlnet_canny.safetensors"]
},
{
"id": 52,
"type": "MarkdownNote",
"pos": [-570, 210],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sd3/#sd35-controlnets)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[8, 4, 2, 8, 1, "VAE"],
[13, 8, 0, 9, 0, "IMAGE"],
[14, 4, 0, 3, 0, "MODEL"],
[63, 3, 0, 8, 0, "LATENT"],
[65, 4, 1, 6, 0, "CLIP"],
[66, 33, 0, 3, 3, "LATENT"],
[91, 45, 0, 48, 0, "IMAGE"],
[92, 48, 0, 47, 0, "IMAGE"],
[93, 47, 0, 49, 0, "IMAGE"],
[98, 6, 0, 50, 0, "CONDITIONING"],
[99, 47, 0, 51, 3, "IMAGE"],
[100, 46, 0, 51, 2, "CONTROL_NET"],
[101, 6, 0, 51, 0, "CONDITIONING"],
[102, 50, 0, 51, 1, "CONDITIONING"],
[103, 51, 0, 3, 1, "CONDITIONING"],
[104, 51, 1, 3, 2, "CONDITIONING"],
[105, 4, 2, 51, 4, "VAE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.91,
"offset": [686.52, 188.52]
}
},
"version": 0.4,
"models": [
{
"name": "sd3.5_large_controlnet_canny.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets/resolve/main/sd3.5_large_controlnet_canny.safetensors?download=true",
"directory": "controlnet"
},
{
"name": "sd3.5_large_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-3.5-fp8/resolve/main/sd3.5_large_fp8_scaled.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,278 @@
{
"last_node_id": 54,
"last_link_id": 102,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1200, 96],
"size": [210, 46],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 53,
"slot_index": 1
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1440, 96],
"size": [952.51, 1007.93],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 51,
"slot_index": 0
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 40,
"type": "CLIPTextEncode",
"pos": [384, 336],
"size": [432, 192],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 102
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [80],
"slot_index": 0
}
],
"title": "Negative Prompt",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 53,
"type": "EmptySD3LatentImage",
"pos": [480, 576],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-48, 96],
"size": [384.76, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [99],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [101, 102],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [53],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd3.5_large_fp8_scaled.safetensors"]
},
{
"id": 16,
"type": "CLIPTextEncode",
"pos": [384, 96],
"size": [432, 192],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 101
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [21],
"slot_index": 0
}
],
"title": "Positive Prompt",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"a bottle with a pink and red galaxy inside it on top of a wooden table on a table in the middle of a modern kitchen with a window to the outdoors mountain range bright sun clouds forest"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 3,
"type": "KSampler",
"pos": [864, 96],
"size": [315, 262],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 99,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 21
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 80
},
{
"name": "latent_image",
"type": "LATENT",
"link": 100
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
585483408983215,
"randomize",
20,
4.01,
"euler",
"sgm_uniform",
1
]
},
{
"id": 54,
"type": "MarkdownNote",
"pos": [-45, 240],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sd3/#sd35)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[21, 16, 0, 3, 1, "CONDITIONING"],
[51, 8, 0, 9, 0, "IMAGE"],
[53, 4, 2, 8, 1, "VAE"],
[80, 40, 0, 3, 2, "CONDITIONING"],
[99, 4, 0, 3, 0, "MODEL"],
[100, 53, 0, 3, 3, "LATENT"],
[101, 4, 1, 16, 0, "CLIP"],
[102, 4, 1, 40, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.14,
"offset": [93.35, -1.71]
}
},
"version": 0.4,
"models": [
{
"name": "sd3.5_large_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-3.5-fp8/resolve/main/sd3.5_large_fp8_scaled.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,728 @@
{
"last_node_id": 49,
"last_link_id": 44,
"nodes": [
{
"id": 36,
"type": "Note",
"pos": [-74, -470],
"size": [315.7, 147.96],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Load Checkpoint BASE",
"properties": {
"text": ""
},
"widgets_values": [
"This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 37,
"type": "Note",
"pos": [610, -460],
"size": [330, 140],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Load Checkpoint REFINER",
"properties": {
"text": ""
},
"widgets_values": [
"This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations."
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 40,
"type": "Note",
"pos": [1325, 234],
"size": [451.5, 424.42],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - KSampler ADVANCED General Information",
"properties": {
"text": ""
},
"widgets_values": [
"Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all its leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out its process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop its process of de-noising the picture.\nIf there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the leftover noise to the next KSampler (assuming there is another one)."
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [544.5, 651.12],
"size": [300, 110],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [27],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 1024, 1],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 17,
"type": "VAEDecode",
"pos": [2220.77, 129.6],
"size": [200, 50],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 25
},
{
"name": "vae",
"type": "VAE",
"link": 34
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [28],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": [],
"color": "#332922",
"bgcolor": "#593930"
},
{
"id": 41,
"type": "Note",
"pos": [2160.77, 229.6],
"size": [320, 120],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - VAE Decoder",
"properties": {
"text": ""
},
"widgets_values": [
"This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG."
],
"color": "#332922",
"bgcolor": "#593930"
},
{
"id": 42,
"type": "Note",
"pos": [564.5, 801.12],
"size": [260, 210],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Empty Latent Image",
"properties": {
"text": ""
},
"widgets_values": [
"This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 11,
"type": "KSamplerAdvanced",
"pos": [1800, 130],
"size": [300, 340],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 14,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 23
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 24
},
{
"name": "latent_image",
"type": "LATENT",
"link": 13
},
{
"name": "steps",
"type": "INT",
"widget": {
"name": "steps"
},
"link": 38,
"slot_index": 4
},
{
"name": "start_at_step",
"type": "INT",
"widget": {
"name": "start_at_step"
},
"link": 44
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [25],
"slot_index": 0
}
],
"title": "KSampler (Advanced) - REFINER",
"properties": {
"Node name for S&R": "KSamplerAdvanced"
},
"widgets_values": [
"disable",
0,
"fixed",
25,
8,
"euler",
"normal",
20,
10000,
"disable"
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 12,
"type": "CheckpointLoaderSimple",
"pos": [600, -611],
"size": [350, 100],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [14],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [19, 20],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [34],
"slot_index": 2
}
],
"title": "Load Checkpoint - REFINER",
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_refiner_1.0.safetensors"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-90, -620],
"size": [350, 100],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [10],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [],
"slot_index": 2
}
],
"title": "Load Checkpoint - BASE",
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 47,
"type": "PrimitiveNode",
"pos": [1037.53, 881.61],
"size": [210, 82],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "end_at_step"
},
"links": [43, 44],
"slot_index": 0
}
],
"title": "end_at_step",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [20, "fixed"],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 45,
"type": "PrimitiveNode",
"pos": [1039.53, 734.61],
"size": [210, 82],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "steps"
},
"links": [38, 41]
}
],
"title": "steps",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [25, "fixed"],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 48,
"type": "Note",
"pos": [1036, 1018],
"size": [213.91, 110.17],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"These can be used to control the total sampling steps and the step at which the sampling switches to the refiner."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 10,
"type": "KSamplerAdvanced",
"pos": [1000, 230],
"size": [300, 334],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 10
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 11
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 12
},
{
"name": "latent_image",
"type": "LATENT",
"link": 27
},
{
"name": "steps",
"type": "INT",
"widget": {
"name": "steps"
},
"link": 41,
"slot_index": 4
},
{
"name": "end_at_step",
"type": "INT",
"widget": {
"name": "end_at_step"
},
"link": 43,
"slot_index": 5
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [13],
"slot_index": 0
}
],
"title": "KSampler (Advanced) - BASE",
"properties": {
"Node name for S&R": "KSamplerAdvanced"
},
"widgets_values": [
"enable",
6767725640732,
"randomize",
25,
8,
"euler",
"normal",
0,
20,
"enable"
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 16,
"type": "CLIPTextEncode",
"pos": [1110, -90],
"size": [340, 140],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 15,
"type": "CLIPTextEncode",
"pos": [1110, -270],
"size": [340, 140],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 19
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"daytime scenery sky nature dark blue bottle with a galaxy stars milky way in it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [610, 30],
"size": [320, 160],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [11],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["daytime sky nature dark blue galaxy bottle"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [610, 240],
"size": [320, 150],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 19,
"type": "SaveImage",
"pos": [2600, 130],
"size": [735.55, 823.98],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 28
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"],
"color": "#222",
"bgcolor": "#000"
},
{
"id": 49,
"type": "MarkdownNote",
"pos": [-90, -255],
"size": [225, 60],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdxl/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[3, 4, 1, 6, 0, "CLIP"],
[5, 4, 1, 7, 0, "CLIP"],
[10, 4, 0, 10, 0, "MODEL"],
[11, 6, 0, 10, 1, "CONDITIONING"],
[12, 7, 0, 10, 2, "CONDITIONING"],
[13, 10, 0, 11, 3, "LATENT"],
[14, 12, 0, 11, 0, "MODEL"],
[19, 12, 1, 15, 0, "CLIP"],
[20, 12, 1, 16, 0, "CLIP"],
[23, 15, 0, 11, 1, "CONDITIONING"],
[24, 16, 0, 11, 2, "CONDITIONING"],
[25, 11, 0, 17, 0, "LATENT"],
[27, 5, 0, 10, 3, "LATENT"],
[28, 17, 0, 19, 0, "IMAGE"],
[34, 12, 2, 17, 1, "VAE"],
[38, 45, 0, 11, 4, "INT"],
[41, 45, 0, 10, 4, "INT"],
[43, 47, 0, 10, 5, "INT"],
[44, 47, 0, 11, 5, "INT"]
],
"groups": [
{
"id": 1,
"title": "Base Prompt",
"bounding": [585, -60, 366, 463],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Refiner Prompt",
"bounding": [1095, -360, 376, 429],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Load in BASE SDXL Model",
"bounding": [-105, -705, 369, 399],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Load in REFINER SDXL Model",
"bounding": [585, -705, 391, 400],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 5,
"title": "Empty Latent Image",
"bounding": [525, 570, 339, 443],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 6,
"title": "VAE Decoder",
"bounding": [2145, 45, 360, 350],
"color": "#b06634",
"font_size": 24,
"flags": {}
},
{
"id": 7,
"title": "Step Control",
"bounding": [1005, 630, 284, 524],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.63,
"offset": [1264.03, 812.09]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "sd_xl_refiner_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-refiner-1.0/resolve/main/sd_xl_refiner_1.0.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,490 @@
{
"last_node_id": 41,
"last_link_id": 106,
"nodes": [
{
"id": 13,
"type": "CLIPVisionEncode",
"pos": [135, -63],
"size": [253.6, 78],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 101
},
{
"name": "image",
"type": "IMAGE",
"link": 95
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 36,
"type": "CLIPVisionEncode",
"pos": [137, 24],
"size": [253.6, 78],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 102
},
{
"name": "image",
"type": "IMAGE",
"link": 98
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1277, -210],
"size": [210, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 106
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [-130, -295],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 104
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["anime"]
},
{
"id": 34,
"type": "LoadImage",
"pos": [-352, -29],
"size": [435.35, 377.59],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [95],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["mountains.png", "image"]
},
{
"id": 38,
"type": "LoadImage",
"pos": [-341, 412],
"size": [435.35, 377.59],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [98],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sunset.png", "image"]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [425, -18],
"size": [425.28, 180.61],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 105
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"]
},
{
"id": 40,
"type": "CheckpointLoaderSimple",
"pos": [-761, -275],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [103],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [104, 105],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [106],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"]
},
{
"id": 39,
"type": "CLIPVisionLoader",
"pos": [-760, -120],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [101, 102],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionLoader"
},
"widgets_values": ["clip_vision_g.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1542, -209],
"size": [635.19, 692.82],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [915, -218],
"size": [315, 262],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 103
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 97
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
133632471276133,
"randomize",
26,
8,
"dpmpp_3m_sde_gpu",
"exponential",
1
]
},
{
"id": 19,
"type": "unCLIPConditioning",
"pos": [347, -207],
"size": [262, 102],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 23
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 24
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [96],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "unCLIPConditioning"
},
"widgets_values": [0.75, 0]
},
{
"id": 37,
"type": "unCLIPConditioning",
"pos": [626, -205],
"size": [262, 102],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 96
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 100
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [97],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "unCLIPConditioning"
},
"widgets_values": [0.75, 0]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [534, 214],
"size": [315, 106],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 41,
"type": "MarkdownNote",
"pos": [-750, -15],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdxl/#revision)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[23, 6, 0, 19, 0, "CONDITIONING"],
[24, 13, 0, 19, 1, "CLIP_VISION_OUTPUT"],
[95, 34, 0, 13, 1, "IMAGE"],
[96, 19, 0, 37, 0, "CONDITIONING"],
[97, 37, 0, 3, 1, "CONDITIONING"],
[98, 38, 0, 36, 1, "IMAGE"],
[100, 36, 0, 37, 1, "CLIP_VISION_OUTPUT"],
[101, 39, 0, 13, 0, "CLIP_VISION"],
[102, 39, 0, 36, 0, "CLIP_VISION"],
[103, 40, 0, 3, 0, "MODEL"],
[104, 40, 1, 6, 0, "CLIP"],
[105, 40, 1, 7, 0, "CLIP"],
[106, 40, 2, 8, 1, "VAE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [962.72, 417.65]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "clip_vision_g.safetensors",
"url": "https://huggingface.co/comfyanonymous/clip_vision_g/resolve/main/clip_vision_g.safetensors?download=true",
"directory": "clip_vision"
}
]
}

View File

@@ -0,0 +1,494 @@
{
"last_node_id": 43,
"last_link_id": 111,
"nodes": [
{
"id": 13,
"type": "CLIPVisionEncode",
"pos": [135, -63],
"size": [253.6, 78],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 101
},
{
"name": "image",
"type": "IMAGE",
"link": 95
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 36,
"type": "CLIPVisionEncode",
"pos": [137, 24],
"size": [253.6, 78],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 102
},
{
"name": "image",
"type": "IMAGE",
"link": 98
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1277, -210],
"size": [210, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 106
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 34,
"type": "LoadImage",
"pos": [-352, -29],
"size": [435.35, 377.59],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [95],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["mountains.png", "image"]
},
{
"id": 38,
"type": "LoadImage",
"pos": [-341, 412],
"size": [435.35, 377.59],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [98],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sunset.png", "image"]
},
{
"id": 40,
"type": "CheckpointLoaderSimple",
"pos": [-761, -275],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [103],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [104],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [106],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"]
},
{
"id": 39,
"type": "CLIPVisionLoader",
"pos": [-760, -120],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [101, 102],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionLoader"
},
"widgets_values": ["clip_vision_g.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1542, -209],
"size": [635.19, 692.82],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [915, -218],
"size": [315, 262],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 103
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 97
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 111
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
133632471276133,
"randomize",
26,
8,
"dpmpp_3m_sde_gpu",
"exponential",
1
]
},
{
"id": 37,
"type": "unCLIPConditioning",
"pos": [626, -205],
"size": [262, 102],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 96
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 100
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [97],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "unCLIPConditioning"
},
"widgets_values": [0.75, 0]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [534, 214],
"size": [315, 106],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 19,
"type": "unCLIPConditioning",
"pos": [347, -207],
"size": [262, 102],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 110
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 24
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [96],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "unCLIPConditioning"
},
"widgets_values": [0.75, 0]
},
{
"id": 42,
"type": "ConditioningZeroOut",
"pos": [60, -211],
"size": [211.6, 26],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 109,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [110],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningZeroOut"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [-182, -184],
"size": [422.85, 164.31],
"flags": {
"collapsed": true
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 104
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [109, 111],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""]
},
{
"id": 43,
"type": "MarkdownNote",
"pos": [-750, -15],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdxl/#revision)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[24, 13, 0, 19, 1, "CLIP_VISION_OUTPUT"],
[95, 34, 0, 13, 1, "IMAGE"],
[96, 19, 0, 37, 0, "CONDITIONING"],
[97, 37, 0, 3, 1, "CONDITIONING"],
[98, 38, 0, 36, 1, "IMAGE"],
[100, 36, 0, 37, 1, "CLIP_VISION_OUTPUT"],
[101, 39, 0, 13, 0, "CLIP_VISION"],
[102, 39, 0, 36, 0, "CLIP_VISION"],
[103, 40, 0, 3, 0, "MODEL"],
[104, 40, 1, 6, 0, "CLIP"],
[106, 40, 2, 8, 1, "VAE"],
[109, 6, 0, 42, 0, "CONDITIONING"],
[110, 42, 0, 19, 0, "CONDITIONING"],
[111, 6, 0, 3, 2, "CONDITIONING"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.49,
"offset": [1046.06, 311.39]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "clip_vision_g.safetensors",
"url": "https://huggingface.co/comfyanonymous/clip_vision_g/resolve/main/clip_vision_g.safetensors?download=true",
"directory": "clip_vision"
}
]
}

View File

@@ -0,0 +1,896 @@
{
"last_node_id": 49,
"last_link_id": 44,
"nodes": [
{
"id": 15,
"type": "CLIPTextEncode",
"pos": [1139.11, -121.79],
"size": [210, 54],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 19
},
{
"name": "text",
"type": "STRING",
"widget": {
"name": "text"
},
"link": 21,
"slot_index": 1
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"evening sunset scenery blue sky nature, glass bottle with a galaxy in it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 16,
"type": "CLIPTextEncode",
"pos": [1139.11, -31.79],
"size": [210, 54],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
},
{
"name": "text",
"type": "STRING",
"widget": {
"name": "text"
},
"link": 22,
"slot_index": 1
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 14,
"type": "PrimitiveNode",
"pos": [117.74, 335.18],
"size": [300, 160],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "STRING",
"type": "STRING",
"widget": {
"name": "text"
},
"links": [18, 22],
"slot_index": 0
}
],
"title": "Negative Prompt (Text)",
"properties": {
"Run widget replace on values": false
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 13,
"type": "PrimitiveNode",
"pos": [117.74, 135.18],
"size": [300, 160],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "STRING",
"type": "STRING",
"widget": {
"name": "text"
},
"links": [16, 21],
"slot_index": 0
}
],
"title": "Positive Prompt (Text)",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [
"evening sunset scenery blue sky nature, glass bottle with a galaxy in it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 36,
"type": "Note",
"pos": [-74, -470],
"size": [315.7, 147.96],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Load Checkpoint BASE",
"properties": {
"text": ""
},
"widgets_values": [
"This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 37,
"type": "Note",
"pos": [610, -460],
"size": [330, 140],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Load Checkpoint REFINER",
"properties": {
"text": ""
},
"widgets_values": [
"This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations."
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 38,
"type": "Note",
"pos": [126.74, 534.18],
"size": [284.33, 123.89],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Text Prompts",
"properties": {
"text": ""
},
"widgets_values": [
"These nodes are where you include the text for:\n - what you want in the picture (Positive Prompt, Green)\n - or what you don't want in the picture (Negative Prompt, Red)\n\nThis node type is called a \"PrimitiveNode\" if you are searching for the node type."
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 40,
"type": "Note",
"pos": [1325, 234],
"size": [451.5, 424.42],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - KSampler ADVANCED General Information",
"properties": {
"text": ""
},
"widgets_values": [
"Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. 
If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)."
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [544.5, 651.12],
"size": [300, 110],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [27],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 1024, 1],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 17,
"type": "VAEDecode",
"pos": [2220.77, 129.6],
"size": [200, 50],
"flags": {},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 25
},
{
"name": "vae",
"type": "VAE",
"link": 34
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [28],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": [],
"color": "#332922",
"bgcolor": "#593930"
},
{
"id": 41,
"type": "Note",
"pos": [2160.77, 229.6],
"size": [320, 120],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - VAE Decoder",
"properties": {
"text": ""
},
"widgets_values": [
"This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG."
],
"color": "#332922",
"bgcolor": "#593930"
},
{
"id": 42,
"type": "Note",
"pos": [564.5, 801.12],
"size": [260, 210],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Empty Latent Image",
"properties": {
"text": ""
},
"widgets_values": [
"This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 43,
"type": "Note",
"pos": [1125, 70],
"size": [240, 80],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - CLIP Encode (REFINER)",
"properties": {
"text": ""
},
"widgets_values": [
"These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Refiner)"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [599.5, 269.48],
"size": [210, 54],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
},
{
"name": "text",
"type": "STRING",
"widget": {
"name": "text"
},
"link": 16,
"slot_index": 1
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [11],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"evening sunset scenery blue sky nature, glass bottle with a galaxy in it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [599.5, 359.48],
"size": [210, 54],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
},
{
"name": "text",
"type": "STRING",
"widget": {
"name": "text"
},
"link": 18,
"slot_index": 1
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 39,
"type": "Note",
"pos": [599.5, 449.48],
"size": [210, 80],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - CLIP Encode (BASE)",
"properties": {
"text": ""
},
"widgets_values": [
"These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Base)"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 11,
"type": "KSamplerAdvanced",
"pos": [1800, 130],
"size": [300, 340],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 14,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 23
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 24
},
{
"name": "latent_image",
"type": "LATENT",
"link": 13
},
{
"name": "steps",
"type": "INT",
"widget": {
"name": "steps"
},
"link": 38,
"slot_index": 4
},
{
"name": "start_at_step",
"type": "INT",
"widget": {
"name": "start_at_step"
},
"link": 44
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [25],
"slot_index": 0
}
],
"title": "KSampler (Advanced) - REFINER",
"properties": {
"Node name for S&R": "KSamplerAdvanced"
},
"widgets_values": [
"disable",
0,
"fixed",
25,
8,
"euler",
"normal",
20,
10000,
"disable"
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 12,
"type": "CheckpointLoaderSimple",
"pos": [600, -611],
"size": [350, 100],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [14],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [19, 20],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [34],
"slot_index": 2
}
],
"title": "Load Checkpoint - REFINER",
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_refiner_1.0.safetensors"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-90, -620],
"size": [350, 100],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [10],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [],
"slot_index": 2
}
],
"title": "Load Checkpoint - BASE",
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 19,
"type": "SaveImage",
"pos": [2600, 130],
"size": [565.77, 596.38],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 28
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"],
"color": "#222",
"bgcolor": "#000"
},
{
"id": 47,
"type": "PrimitiveNode",
"pos": [1037.53, 881.61],
"size": [210, 82],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "end_at_step"
},
"links": [43, 44],
"slot_index": 0
}
],
"title": "end_at_step",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [20, "fixed"],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 45,
"type": "PrimitiveNode",
"pos": [1039.53, 734.61],
"size": [210, 82],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "steps"
},
"links": [38, 41]
}
],
"title": "steps",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [25, "fixed"],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 48,
"type": "Note",
"pos": [1036, 1018],
"size": [213.91, 110.17],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"These can be used to control the total sampling steps and the step at which the sampling switches to the refiner."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 10,
"type": "KSamplerAdvanced",
"pos": [1000, 230],
"size": [300, 334],
"flags": {},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 10
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 11
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 12
},
{
"name": "latent_image",
"type": "LATENT",
"link": 27
},
{
"name": "steps",
"type": "INT",
"widget": {
"name": "steps"
},
"link": 41,
"slot_index": 4
},
{
"name": "end_at_step",
"type": "INT",
"widget": {
"name": "end_at_step"
},
"link": 43,
"slot_index": 5
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [13],
"slot_index": 0
}
],
"title": "KSampler (Advanced) - BASE",
"properties": {
"Node name for S&R": "KSamplerAdvanced"
},
"widgets_values": [
"enable",
721897303308196,
"randomize",
25,
8,
"euler",
"normal",
0,
20,
"enable"
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 49,
"type": "MarkdownNote",
"pos": [-105, -255],
"size": [225, 60],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdxl/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[3, 4, 1, 6, 0, "CLIP"],
[5, 4, 1, 7, 0, "CLIP"],
[10, 4, 0, 10, 0, "MODEL"],
[11, 6, 0, 10, 1, "CONDITIONING"],
[12, 7, 0, 10, 2, "CONDITIONING"],
[13, 10, 0, 11, 3, "LATENT"],
[14, 12, 0, 11, 0, "MODEL"],
[16, 13, 0, 6, 1, "STRING"],
[18, 14, 0, 7, 1, "STRING"],
[19, 12, 1, 15, 0, "CLIP"],
[20, 12, 1, 16, 0, "CLIP"],
[21, 13, 0, 15, 1, "STRING"],
[22, 14, 0, 16, 1, "STRING"],
[23, 15, 0, 11, 1, "CONDITIONING"],
[24, 16, 0, 11, 2, "CONDITIONING"],
[25, 11, 0, 17, 0, "LATENT"],
[27, 5, 0, 10, 3, "LATENT"],
[28, 17, 0, 19, 0, "IMAGE"],
[34, 12, 2, 17, 1, "VAE"],
[38, 45, 0, 11, 4, "INT"],
[41, 45, 0, 10, 4, "INT"],
[43, 47, 0, 10, 5, "INT"],
[44, 47, 0, 11, 5, "INT"]
],
"groups": [
{
"id": 1,
"title": "Base Prompt",
"bounding": [585, 195, 252, 361],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Refiner Prompt",
"bounding": [1095, -195, 282, 372],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Text Prompts",
"bounding": [105, 45, 339, 622],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Load in BASE SDXL Model",
"bounding": [-105, -705, 369, 399],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 5,
"title": "Load in REFINER SDXL Model",
"bounding": [585, -705, 391, 400],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 6,
"title": "Empty Latent Image",
"bounding": [525, 570, 339, 443],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 7,
"title": "VAE Decoder",
"bounding": [2145, 45, 360, 350],
"color": "#b06634",
"font_size": 24,
"flags": {}
},
{
"id": 8,
"title": "Step Control",
"bounding": [1005, 630, 284, 524],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.78,
"offset": [685.2, 1020.68]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "sd_xl_refiner_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-refiner-1.0/resolve/main/sd_xl_refiner_1.0.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,372 @@
{
"last_node_id": 28,
"last_link_id": 54,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [352, 176],
"size": [425.28, 180.61],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 39
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [20],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"]
},
{
"id": 20,
"type": "CheckpointLoaderSimple",
"pos": [-17, -70],
"size": [343.7, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [41, 45],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [38, 39],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [40],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_turbo_1.0_fp16.safetensors"]
},
{
"id": 14,
"type": "KSamplerSelect",
"pos": [452, -144],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"shape": 3,
"links": [18]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler_ancestral"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [462, 398],
"size": [315, 106],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1183, -66],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 28
},
{
"name": "vae",
"type": "VAE",
"link": 40,
"slot_index": 1
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [53, 54],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 25,
"type": "PreviewImage",
"pos": [1213, 93],
"size": [501.7, 541.92],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 53
}
],
"outputs": [],
"properties": {
"Node name for S&R": "PreviewImage"
},
"widgets_values": []
},
{
"id": 22,
"type": "SDTurboScheduler",
"pos": [452, -248],
"size": [315, 82],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 45,
"slot_index": 0
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"shape": 3,
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "SDTurboScheduler"
},
"widgets_values": [1, 1]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [351, -45],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 38,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [19],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"beautiful landscape scenery glass bottle with a galaxy inside cute fennec fox snow HDR sunset"
]
},
{
"id": 27,
"type": "SaveImage",
"pos": [1843, -154],
"size": [466.79, 516.83],
"flags": {},
"order": 10,
"mode": 2,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 54
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 13,
"type": "SamplerCustom",
"pos": [800, -66],
"size": [355.2, 230],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 41,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 19,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 20
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 18,
"slot_index": 3
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 49,
"slot_index": 4
},
{
"name": "latent_image",
"type": "LATENT",
"link": 23,
"slot_index": 5
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"shape": 3,
"links": [28],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustom"
},
"widgets_values": [true, 0, "fixed", 1]
},
{
"id": 28,
"type": "MarkdownNote",
"pos": [-15, 90],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdturbo/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[18, 14, 0, 13, 3, "SAMPLER"],
[19, 6, 0, 13, 1, "CONDITIONING"],
[20, 7, 0, 13, 2, "CONDITIONING"],
[23, 5, 0, 13, 5, "LATENT"],
[28, 13, 0, 8, 0, "LATENT"],
[38, 20, 1, 6, 0, "CLIP"],
[39, 20, 1, 7, 0, "CLIP"],
[40, 20, 2, 8, 1, "VAE"],
[41, 20, 0, 13, 0, "MODEL"],
[45, 20, 0, 22, 0, "MODEL"],
[49, 22, 0, 13, 4, "SIGMAS"],
[53, 8, 0, 25, 0, "IMAGE"],
[54, 8, 0, 27, 0, "IMAGE"]
],
"groups": [
{
"id": 1,
"title": "Unmute (CTRL-M) if you want to save images.",
"bounding": [1815, -255, 536, 676],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 1.02,
"offset": [311.24, 325.56]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_turbo_1.0_fp16.safetensors",
"url": "https://huggingface.co/stabilityai/sdxl-turbo/resolve/main/sd_xl_turbo_1.0_fp16.safetensors",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,302 @@
{
"last_node_id": 18,
"last_link_id": 26,
"nodes": [
{
"id": 3,
"type": "KSampler",
"pos": [864, 96],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 18
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 12,
"slot_index": 3
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
840755638734093,
"randomize",
50,
4.98,
"dpmpp_3m_sde_gpu",
"exponential",
1
]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [0, 240],
"size": [336, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [18],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [14],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["stable_audio_open_1.0.safetensors"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 96],
"size": [432, 144],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 25
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["heaven church electronic dance music"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [384, 288],
"size": [432, 144],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 26
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 10,
"type": "CLIPLoader",
"pos": [0, 96],
"size": [335.65, 82],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [25, 26],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPLoader"
},
"widgets_values": ["t5_base.safetensors", "stable_audio", "default"]
},
{
"id": 11,
"type": "EmptyLatentAudio",
"pos": [576, 480],
"size": [240, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [12]
}
],
"properties": {
"Node name for S&R": "EmptyLatentAudio"
},
"widgets_values": [47.6, 1]
},
{
"id": 12,
"type": "VAEDecodeAudio",
"pos": [1200, 96],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 13
},
{
"name": "vae",
"type": "VAE",
"link": 14,
"slot_index": 1
}
],
"outputs": [
{
"name": "AUDIO",
"type": "AUDIO",
"shape": 3,
"links": [15],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecodeAudio"
},
"widgets_values": []
},
{
"id": 13,
"type": "SaveAudio",
"pos": [1440, 96],
"size": [355.22, 100],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "audio",
"type": "AUDIO",
"link": 15
}
],
"outputs": [],
"properties": {
"Node name for S&R": "SaveAudio"
},
"widgets_values": ["audio/ComfyUI", ""]
},
{
"id": 18,
"type": "MarkdownNote",
"pos": [15, 390],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/audio/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[12, 11, 0, 3, 3, "LATENT"],
[13, 3, 0, 12, 0, "LATENT"],
[14, 4, 2, 12, 1, "VAE"],
[15, 12, 0, 13, 0, "AUDIO"],
[18, 4, 0, 3, 0, "MODEL"],
[25, 10, 0, 6, 0, "CLIP"],
[26, 10, 0, 7, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1,
"offset": [201.78, 380.0]
}
},
"version": 0.4,
"models": [
{
"name": "t5_base.safetensors",
"url": "https://huggingface.co/google-t5/t5-base/resolve/main/model.safetensors",
"directory": "text_encoders"
},
{
"name": "stable_audio_open_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-audio-open-1.0/resolve/main/model.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,273 @@
{
"last_node_id": 27,
"last_link_id": 55,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1207.8, 375.7],
"size": [210, 46],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 26
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [868, 376],
"size": [315, 262],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 42
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 53
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 54
},
{
"name": "latent_image",
"type": "LATENT",
"link": 55
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
237514639057560,
"fixed",
20,
5,
"euler",
"sgm_uniform",
1
]
},
{
"id": 25,
"type": "SaveImage",
"pos": [1459, 378],
"size": [262.29, 308.65],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 49
}
],
"outputs": [],
"properties": {},
"widgets_values": ["3d/ComfyUI"]
},
{
"id": 23,
"type": "LoadImage",
"pos": [175, 438],
"size": [316.52, 405.71],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [51],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["hypernetwork_example_output.png", "image"]
},
{
"id": 26,
"type": "StableZero123_Conditioning",
"pos": [514, 394],
"size": [315, 194],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 50
},
{
"name": "init_image",
"type": "IMAGE",
"link": 51
},
{
"name": "vae",
"type": "VAE",
"link": 52
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"shape": 3,
"links": [53],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"shape": 3,
"links": [54],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"shape": 3,
"links": [55],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "StableZero123_Conditioning"
},
"widgets_values": [256, 256, 1, 10, 142]
},
{
"id": 15,
"type": "ImageOnlyCheckpointLoader",
"pos": [89, 290],
"size": [369.6, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [42],
"slot_index": 0
},
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [50],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [26, 52],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "ImageOnlyCheckpointLoader"
},
"widgets_values": ["stable_zero123.ckpt"]
},
{
"id": 27,
"type": "MarkdownNote",
"pos": [-75, 450],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/3d/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[26, 15, 2, 8, 1, "VAE"],
[42, 15, 0, 3, 0, "MODEL"],
[49, 8, 0, 25, 0, "IMAGE"],
[50, 15, 1, 26, 0, "CLIP_VISION"],
[51, 23, 0, 26, 1, "IMAGE"],
[52, 15, 2, 26, 2, "VAE"],
[53, 26, 0, 3, 1, "CONDITIONING"],
[54, 26, 1, 3, 2, "CONDITIONING"],
[55, 26, 2, 3, 3, "LATENT"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.75,
"offset": [439.73, 40.67]
}
},
"version": 0.4,
"models": [
{
"name": "stable_zero123.ckpt",
"url": "https://huggingface.co/stabilityai/stable-zero123/resolve/main/stable_zero123.ckpt",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,535 @@
{
"last_node_id": 23,
"last_link_id": 40,
"nodes": [
{
"id": 3,
"type": "KSampler",
"pos": [1843.74, 476.56],
"size": [315, 262],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 39
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 17
},
{
"name": "latent_image",
"type": "LATENT",
"link": 18
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
237514639057514,
"randomize",
20,
2.5,
"euler",
"karras",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [2183.74, 476.56],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 26
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "SaveAnimatedWEBP",
"pos": [1654, 829],
"size": [741.67, 564.59],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 10
}
],
"outputs": [],
"properties": {
"Node name for S&R": "SaveAnimatedWEBP"
},
"widgets_values": ["ComfyUI", 10, false, 85, "default"]
},
{
"id": 12,
"type": "SVD_img2vid_Conditioning",
"pos": [1463.74, 496.56],
"size": [315, 218],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 24
},
{
"name": "init_image",
"type": "IMAGE",
"link": 35,
"slot_index": 1
},
{
"name": "vae",
"type": "VAE",
"link": 25
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"shape": 3,
"links": [40],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"shape": 3,
"links": [17],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"shape": 3,
"links": [18],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "SVD_img2vid_Conditioning"
},
"widgets_values": [1024, 576, 25, 127, 6, 0]
},
{
"id": 14,
"type": "VideoLinearCFGGuidance",
"pos": [1463.74, 366.56],
"size": [315, 58],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 23
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [39],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VideoLinearCFGGuidance"
},
"widgets_values": [1]
},
{
"id": 15,
"type": "ImageOnlyCheckpointLoader",
"pos": [1050, 320],
"size": [369.6, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [23],
"slot_index": 0
},
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [24],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [25, 26],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "ImageOnlyCheckpointLoader"
},
"widgets_values": ["svd_xt.safetensors"]
},
{
"id": 16,
"type": "CheckpointLoaderSimple",
"pos": [0, 510],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [28],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [29, 31],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [34]
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"]
},
{
"id": 17,
"type": "KSampler",
"pos": [802.4, 566.4],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 28
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 30
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 32
},
{
"name": "latent_image",
"type": "LATENT",
"link": 37,
"slot_index": 3
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [33],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
144698910769133,
"randomize",
15,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 18,
"type": "CLIPTextEncode",
"pos": [342.4, 516.4],
"size": [390, 130],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 29
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [30],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"photograph beautiful scenery nature mountains alps river rapids snow sky cumulus clouds"
]
},
{
"id": 19,
"type": "CLIPTextEncode",
"pos": [342.4, 696.4],
"size": [390, 130],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 31
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [32],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"]
},
{
"id": 20,
"type": "VAEDecode",
"pos": [1172.4, 566.4],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 33
},
{
"name": "vae",
"type": "VAE",
"link": 34,
"slot_index": 1
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [35, 36],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 21,
"type": "PreviewImage",
"pos": [1152.4, 656.4],
"size": [275.95, 246],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 36
}
],
"outputs": [],
"properties": {
"Node name for S&R": "PreviewImage"
},
"widgets_values": []
},
{
"id": 22,
"type": "EmptyLatentImage",
"pos": [422.4, 866.4],
"size": [310, 110],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [37]
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 576, 1]
},
{
"id": 23,
"type": "MarkdownNote",
"pos": [0, 660],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/video/#image-to-video)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[10, 8, 0, 10, 0, "IMAGE"],
[17, 12, 1, 3, 2, "CONDITIONING"],
[18, 12, 2, 3, 3, "LATENT"],
[23, 15, 0, 14, 0, "MODEL"],
[24, 15, 1, 12, 0, "CLIP_VISION"],
[25, 15, 2, 12, 2, "VAE"],
[26, 15, 2, 8, 1, "VAE"],
[28, 16, 0, 17, 0, "MODEL"],
[29, 16, 1, 18, 0, "CLIP"],
[30, 18, 0, 17, 1, "CONDITIONING"],
[31, 16, 1, 19, 0, "CLIP"],
[32, 19, 0, 17, 2, "CONDITIONING"],
[33, 17, 0, 20, 0, "LATENT"],
[34, 16, 2, 20, 1, "VAE"],
[35, 20, 0, 12, 1, "IMAGE"],
[36, 20, 0, 21, 0, "IMAGE"],
[37, 22, 0, 17, 3, "LATENT"],
[39, 14, 0, 3, 0, "MODEL"],
[40, 12, 0, 3, 1, "CONDITIONING"]
],
"groups": [
{
"id": 1,
"title": "Image to Video",
"bounding": [1455, 300, 954, 478],
"color": "#8A8",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Text to Image",
"bounding": [330, 435, 1106, 544],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 1.13,
"offset": [502.97, -29.59]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "svd_xt.safetensors",
"url": "https://huggingface.co/stabilityai/stable-video-diffusion-img2vid-xt/resolve/main/svd_xt.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,652 +0,0 @@
{
"last_node_id": 16,
"last_link_id": 23,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [
1235.7215957031258,
577.1878720703122
],
"size": {
"0": 210,
"1": 46
},
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 21
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
9
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
}
},
{
"id": 10,
"type": "LatentUpscale",
"pos": [
1238,
170
],
"size": {
"0": 315,
"1": 130
},
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 10
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
14
]
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": [
"nearest-exact",
1152,
1152,
"disabled"
]
},
{
"id": 13,
"type": "VAEDecode",
"pos": [
1961,
125
],
"size": {
"0": 210,
"1": 46
},
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 15
},
{
"name": "vae",
"type": "VAE",
"link": 22
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
17
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
}
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [
374,
171
],
"size": {
"0": 422.84503173828125,
"1": 164.31304931640625
},
"flags": {},
"order": 2,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 19
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
4,
12
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece HDR victorian portrait painting of woman, blonde hair, mountain nature, blue sky\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [
377,
381
],
"size": {
"0": 425.27801513671875,
"1": 180.6060791015625
},
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
6,
13
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"bad hands, text, watermark\n"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [
435,
600
],
"size": {
"0": 315,
"1": 106
},
"flags": {},
"order": 0,
"mode": 0,
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
2
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [
768,
768,
1
]
},
{
"id": 11,
"type": "KSampler",
"pos": [
1585,
114
],
"size": {
"0": 315,
"1": 262
},
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 23,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 12,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 13,
"slot_index": 2
},
{
"name": "latent_image",
"type": "LATENT",
"link": 14,
"slot_index": 3
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
15
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
469771404043268,
"randomize",
14,
8,
"dpmpp_2m",
"simple",
0.5
]
},
{
"id": 12,
"type": "SaveImage",
"pos": [
2203,
123
],
"size": {
"0": 407.53717041015625,
"1": 468.13226318359375
},
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 17
}
],
"properties": {},
"widgets_values": [
"ComfyUI"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [
845,
172
],
"size": {
"0": 315,
"1": 262
},
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 18
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
7,
10
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
89848141647836,
"randomize",
12,
8,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 16,
"type": "CheckpointLoaderSimple",
"pos": [
24,
315
],
"size": {
"0": 315,
"1": 98
},
"flags": {},
"order": 1,
"mode": 0,
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
18,
23
],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [
19,
20
],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [
21,
22
],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": [
"v2-1_768-ema-pruned.safetensors"
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [
1495.7215957031258,
576.1878720703122
],
"size": [
232.9403301043692,
282.4336258387117
],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"properties": {},
"widgets_values": [
"ComfyUI"
]
}
],
"links": [
[
2,
5,
0,
3,
3,
"LATENT"
],
[
4,
6,
0,
3,
1,
"CONDITIONING"
],
[
6,
7,
0,
3,
2,
"CONDITIONING"
],
[
7,
3,
0,
8,
0,
"LATENT"
],
[
9,
8,
0,
9,
0,
"IMAGE"
],
[
10,
3,
0,
10,
0,
"LATENT"
],
[
12,
6,
0,
11,
1,
"CONDITIONING"
],
[
13,
7,
0,
11,
2,
"CONDITIONING"
],
[
14,
10,
0,
11,
3,
"LATENT"
],
[
15,
11,
0,
13,
0,
"LATENT"
],
[
17,
13,
0,
12,
0,
"IMAGE"
],
[
18,
16,
0,
3,
0,
"MODEL"
],
[
19,
16,
1,
6,
0,
"CLIP"
],
[
20,
16,
1,
7,
0,
"CLIP"
],
[
21,
16,
2,
8,
1,
"VAE"
],
[
22,
16,
2,
13,
1,
"VAE"
],
[
23,
16,
0,
11,
0,
"MODEL"
]
],
"groups": [
{
"title": "Txt2Img",
"bounding": [
-1,
30,
1211,
708
],
"color": "#a1309b"
},
{
"title": "Save Intermediate Image",
"bounding": [
1225,
500,
516,
196
],
"color": "#3f789e"
},
{
"title": "Hires Fix",
"bounding": [
1224,
29,
710,
464
],
"color": "#b58b2a"
},
{
"title": "Save Final Image",
"bounding": [
1949,
31,
483,
199
],
"color": "#3f789e"
}
],
"config": {},
"extra": {},
"version": 0.4,
"models": [
{
"name": "v2-1_768-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -26,8 +26,9 @@ try {
// Create the PR
console.log('Creating PR...')
const prBody = `Automated update of litegraph to version ${newVersion}. Ref: https://github.com/Comfy-Org/litegraph.js/releases/tag/v${newVersion}`
execSync(
`gh pr create --title "Update litegraph ${newVersion}" --label "dependencies" --body "Automated update of litegraph to version ${newVersion}"`,
`gh pr create --title "Update litegraph ${newVersion}" --label "dependencies" --body "${prBody}"`,
{ stdio: 'inherit' }
)

View File

@@ -45,8 +45,6 @@ body {
height: 100vh;
margin: 0;
overflow: hidden;
grid-template-columns: auto 1fr auto;
grid-template-rows: auto 1fr auto;
background: var(--bg-color) var(--bg-img);
color: var(--fg-color);
min-height: -webkit-fill-available;
@@ -56,87 +54,6 @@ body {
font-family: Arial, sans-serif;
}
/**
+------------------+------------------+------------------+
| |
| .comfyui-body- |
| top |
| (spans all cols) |
| |
+------------------+------------------+------------------+
| | | |
| .comfyui-body- | #graph-canvas | .comfyui-body- |
| left | | right |
| | | |
| | | |
+------------------+------------------+------------------+
| |
| .comfyui-body- |
| bottom |
| (spans all cols) |
| |
+------------------+------------------+------------------+
*/
.comfyui-body-top {
order: -5;
/* Span across all columns */
grid-column: 1/-1;
/* Position at the first row */
grid-row: 1;
/* Top menu bar dropdown needs to be above of graph canvas splitter overlay which is z-index: 999 */
/* Top menu bar z-index needs to be higher than bottom menu bar z-index as by default
pysssss's image feed is located at body-bottom, and it can overlap with the queue button, which
is located in body-top. */
z-index: 1001;
display: flex;
flex-direction: column;
}
.comfyui-body-left {
order: -4;
/* Position in the first column */
grid-column: 1;
/* Position below the top element */
grid-row: 2;
z-index: 10;
display: flex;
}
.graph-canvas-container {
width: 100%;
height: 100%;
order: -3;
grid-column: 2;
grid-row: 2;
position: relative;
overflow: hidden;
}
#graph-canvas {
width: 100%;
height: 100%;
touch-action: none;
}
.comfyui-body-right {
order: -2;
z-index: 10;
grid-column: 3;
grid-row: 2;
}
.comfyui-body-bottom {
order: 4;
/* Span across all columns */
grid-column: 1/-1;
grid-row: 3;
/* Bottom menu bar dropdown needs to be above of graph canvas splitter overlay which is z-index: 999 */
z-index: 1000;
display: flex;
flex-direction: column;
}
.comfy-multiline-input {
background-color: var(--comfy-input-bg);
color: var(--input-text);
@@ -551,82 +468,6 @@ dialog::backdrop {
justify-content: center;
}
#comfy-settings-dialog {
padding: 0;
width: 41rem;
}
#comfy-settings-dialog tr > td:first-child {
text-align: right;
}
#comfy-settings-dialog tbody button,
#comfy-settings-dialog table > button {
background-color: var(--bg-color);
border: 1px var(--border-color) solid;
border-radius: 0;
color: var(--input-text);
font-size: 1rem;
padding: 0.5rem;
}
#comfy-settings-dialog button:hover {
background-color: var(--tr-odd-bg-color);
}
/* General CSS for tables */
.comfy-table {
border-collapse: collapse;
color: var(--input-text);
font-family: Arial, sans-serif;
width: 100%;
}
.comfy-table caption {
position: sticky;
top: 0;
background-color: var(--bg-color);
color: var(--input-text);
font-size: 1rem;
font-weight: bold;
padding: 8px;
text-align: center;
border-bottom: 1px solid var(--border-color);
}
.comfy-table caption .comfy-btn {
position: absolute;
top: -2px;
right: 0;
bottom: 0;
cursor: pointer;
border: none;
height: 100%;
border-radius: 0;
aspect-ratio: 1/1;
user-select: none;
font-size: 20px;
}
.comfy-table caption .comfy-btn:focus {
outline: none;
}
.comfy-table tr:nth-child(even) {
background-color: var(--tr-even-bg-color);
}
.comfy-table tr:nth-child(odd) {
background-color: var(--tr-odd-bg-color);
}
.comfy-table td,
.comfy-table th {
border: 1px solid var(--border-color);
padding: 8px;
}
/* Context menu */
.litegraph .dialog {
@@ -725,24 +566,6 @@ dialog::backdrop {
will-change: transform;
}
@media only screen and (max-width: 450px) {
#comfy-settings-dialog .comfy-table tbody {
display: grid;
}
#comfy-settings-dialog .comfy-table tr {
display: grid;
}
#comfy-settings-dialog tr > td:first-child {
text-align: center;
border-bottom: none;
padding-bottom: 0;
}
#comfy-settings-dialog tr > td:not(:first-child) {
text-align: center;
border-top: none;
}
}
audio.comfy-audio.empty-audio-widget {
display: none;
}
@@ -753,7 +576,6 @@ audio.comfy-audio.empty-audio-widget {
left: 0;
width: 100%;
height: 100%;
pointer-events: none;
}
/* Set auto complete panel's width as it is not accessible within vue-root */

View File

@@ -13,7 +13,7 @@
:aria-label="$t('menu.showMenu')"
aria-live="assertive"
@click="exitFocusMode"
@contextmenu="showNativeMenu"
@contextmenu="showNativeSystemMenu"
/>
<div v-show="menuSetting !== 'Bottom'" class="window-actions-spacer" />
</div>
@@ -26,7 +26,7 @@ import { CSSProperties, computed, watchEffect } from 'vue'
import { app } from '@/scripts/app'
import { useSettingStore } from '@/stores/settingStore'
import { useWorkspaceStore } from '@/stores/workspaceStore'
import { showNativeMenu } from '@/utils/envUtil'
import { showNativeSystemMenu } from '@/utils/envUtil'
const workspaceState = useWorkspaceStore()
const settingStore = useSettingStore()
@@ -57,6 +57,6 @@ const positionCSS = computed<CSSProperties>(() =>
<style scoped>
.comfy-menu-hamburger {
@apply pointer-events-auto fixed z-[9999] flex flex-row;
@apply fixed z-[9999] flex flex-row;
}
</style>

Some files were not shown because too many files have changed in this diff Show More