Compare commits

..

117 Commits

Author SHA1 Message Date
bymyself
b198aaaca2 centralize render condition in TopMenuBar 2025-03-06 17:33:16 -07:00
bymyself
7003a9e98b refactor 2025-03-06 02:26:14 -07:00
bymyself
35fb6378d1 add webview store 2025-03-06 02:12:36 -07:00
Robin Huang
b71952c141 Update uv astral python mirror. (#2863) 2025-03-04 18:53:58 -05:00
Chenlei Hu
680268bb29 Generate json schema for node def (#2862) 2025-03-04 17:40:26 -05:00
Chenlei Hu
d0ce2d2597 [Cleanup] Refactor widget construction (#2861) 2025-03-04 17:40:13 -05:00
Chenlei Hu
6255cea181 Use V2 schema in widget constructors (Part 1) (#2860) 2025-03-04 17:22:13 -05:00
Chenlei Hu
89b73429b7 Add back type guard on string widget (#2859) 2025-03-04 16:52:58 -05:00
bymyself
a415da616c Add Comfy Registry store and search hook (#2848) 2025-03-04 16:33:46 -05:00
Chenlei Hu
05b6f6d8a2 [Cleanup] Remove unused LGraphNode.callback (#2857) 2025-03-04 16:05:12 -05:00
Chenlei Hu
2d179ad632 [Refactor] Use node def v2 in registerNodeDef (#2856) 2025-03-04 12:07:13 -05:00
Chenlei Hu
fe5964ceb6 [Refactor] Use V2 node def in ComfyApp (#2854) 2025-03-04 11:14:14 -05:00
Comfy Org PR Bot
f434610979 [chore] Update litegraph to 0.9.6 (#2855)
Co-authored-by: webfiltered <176114999+webfiltered@users.noreply.github.com>
2025-03-04 10:50:43 -05:00
Chenlei Hu
eceea51800 [Test] Add playwright test on Note and MarkdownNote (#2853)
Co-authored-by: github-actions <github-actions@github.com>
2025-03-04 10:04:03 -05:00
Comfy Org PR Bot
ed4d2aa40c 1.12.3 (#2852)
Co-authored-by: huchenlei <20929282+huchenlei@users.noreply.github.com>
2025-03-04 09:36:26 -05:00
Chenlei Hu
f593f3caa4 [Schema] ComfyNodeDefV2 schema (#2847) 2025-03-04 09:15:16 -05:00
filtered
252e07ad17 [Workaround] Fix #2849 with runtime type guard (#2850) 2025-03-04 23:10:09 +11:00
Chenlei Hu
51aafaec08 [TS] Fix InputSpec type in node constructor (#2846) 2025-03-03 21:26:21 -05:00
Comfy Org PR Bot
85aee9838f 1.12.2 (#2843)
Co-authored-by: huchenlei <20929282+huchenlei@users.noreply.github.com>
2025-03-03 18:49:19 -05:00
bymyself
e8efd0d801 Show templates to first time user (#2841) 2025-03-03 18:49:08 -05:00
bymyself
22f0dcc0a0 Fix handling of templates index.json not found (#2842) 2025-03-03 18:48:30 -05:00
Chenlei Hu
b2f3d85e24 [Cleanup] Remove manual double click delay (#2840) 2025-03-03 17:36:26 -05:00
bymyself
e1f23bf02e Rename SVD templates (#2839) 2025-03-03 17:35:36 -05:00
Chenlei Hu
8affd7eec7 [Cleanup] Remove combo connection type check (#2838) 2025-03-03 17:35:18 -05:00
bymyself
47604e6c2d Add Comfy Registry service (#2836) 2025-03-03 16:58:06 -05:00
Chenlei Hu
30c750f787 [i18n] Ignore devtools nodes for i18n (#2835) 2025-03-03 16:57:52 -05:00
Comfy Org PR Bot
b24bc48102 Update locales for node definitions (#2837)
Co-authored-by: huchenlei <20929282+huchenlei@users.noreply.github.com>
2025-03-03 16:57:43 -05:00
Chenlei Hu
603825b2a0 [Refactor] Add util to merge input spec (#2834) 2025-03-03 15:23:47 -05:00
Comfy Org PR Bot
f76995a3b9 [chore] Update litegraph to 0.9.5 (#2833)
Co-authored-by: webfiltered <176114999+webfiltered@users.noreply.github.com>
2025-03-04 06:34:56 +11:00
Comfy Org PR Bot
21f115c077 [chore] Update litegraph to 0.9.4 (#2832)
Co-authored-by: webfiltered <176114999+webfiltered@users.noreply.github.com>
2025-03-03 13:17:50 -05:00
Chenlei Hu
ca1607024f Add tooltip to selection toolbox items (#2829) 2025-03-03 11:22:18 -05:00
Comfy Org PR Bot
1b14e4086e [chore] Update Comfy Registry API types from comfy-api@dac7ff0 (#2830)
Co-authored-by: christian-byrne <72887196+christian-byrne@users.noreply.github.com>
2025-03-03 11:22:03 -05:00
Miguel C
82a8aba704 [Feature] Add InputKnob component and integrate with FormItem (#2821) 2025-03-03 10:54:16 -05:00
bymyself
a38a11f397 Fix update-registry-types workflow (#2820) 2025-03-03 09:40:18 -05:00
Miguel C
132a0ded09 [Feature] Adds the litegraph knob widget support (#2822) 2025-03-03 09:38:57 -05:00
Dr.Lt.Data
c997bcdba1 refine locales/ko (#2824) 2025-03-03 09:37:31 -05:00
filtered
9f36b9daf3 [Test] Add ComfyMouse fixture for Playwright tests (#2826) 2025-03-04 01:34:42 +11:00
Comfy Org PR Bot
bd8672a04a [chore] Update litegraph to 0.9.3 (#2823)
Co-authored-by: webfiltered <176114999+webfiltered@users.noreply.github.com>
2025-03-03 18:32:49 +11:00
Chenlei Hu
f16ef00055 Re-enable add node / add group tests (#2815)
Co-authored-by: github-actions <github-actions@github.com>
2025-03-02 21:58:43 -05:00
bymyself
8584f982a0 Add workflow to generate comfy registry ts types (#2818) 2025-03-02 21:33:34 -05:00
Chenlei Hu
d37ac3aa16 [TS] Fix input spec types in widgets (#2817) 2025-03-02 21:01:50 -05:00
Chenlei Hu
a8bb6c4daa [Cleanup] Remove unused dependency ts-node (#2813) 2025-03-02 18:53:53 -05:00
Comfy Org PR Bot
5543c969b2 1.12.1 (#2812)
Co-authored-by: huchenlei <20929282+huchenlei@users.noreply.github.com>
2025-03-02 17:16:07 -05:00
Chenlei Hu
b30aac6f98 [CI] Use gh action to update electron types (#2811) 2025-03-02 17:15:16 -05:00
Chenlei Hu
fd4263065b [CI] Use gh action to bump version (#2810) 2025-03-02 17:10:30 -05:00
Comfy Org PR Bot
fee833ddb6 [chore] Update litegraph to 0.9.0 (#2809)
Co-authored-by: huchenlei <20929282+huchenlei@users.noreply.github.com>
2025-03-02 17:09:25 -05:00
Chenlei Hu
5b2b3cdacf [CI] Use gh action to update litegraph (#2808) 2025-03-02 16:57:30 -05:00
Chenlei Hu
0386fd7c7d Revert "[nit] Remove hardcoded grid style on body" (#2807) 2025-03-02 15:51:05 -05:00
bymyself
8d515dc309 Use index.json to load workflow templates (#2803) 2025-03-02 15:37:15 -05:00
Chenlei Hu
e6a583e11b [Cleanup] Remove deploy script (#2806) 2025-03-02 15:36:54 -05:00
Chenlei Hu
9431c955a6 [CI] Include workflow templates at build time (#2775) 2025-03-02 15:22:18 -05:00
Chenlei Hu
6303992f4e [CI] Update ComfyUI_frontend_setup_action (#2805) 2025-03-02 15:11:13 -05:00
bymyself
3493a827ee [Docs] Fix example in doc comment (#2804) 2025-03-02 14:22:55 -05:00
Terry Jia
790b284a23 add credit (#2802) 2025-03-02 12:50:22 -05:00
bymyself
224a236896 Fix pasting image from browser on Windows (#2797) 2025-03-02 10:49:57 -05:00
bymyself
0aef39ceee Fix race when creating loader node to handle pasted media (#2799) 2025-03-02 10:49:09 -05:00
Terry Jia
b1713b4c80 [3d] add lineart mode (#2800)
Co-authored-by: github-actions <github-actions@github.com>
2025-03-02 10:48:23 -05:00
Chenlei Hu
699ebe2f93 Update README.md (Release Schedule) (#2796) 2025-03-01 21:39:31 -05:00
Chenlei Hu
f5c21814f9 Remove broken/unused widget[TARGET] (#2795) 2025-03-01 21:26:50 -05:00
Chenlei Hu
ba2797c332 1.12.0 (#2794) 2025-03-01 19:56:47 -05:00
Chenlei Hu
0175db58bb [Type] Add type annotations for widgetInputs litegraph hooks (#2793) 2025-03-01 19:27:21 -05:00
Chenlei Hu
503341b966 Inline numeric widget configurations (#2792) 2025-03-01 18:09:23 -05:00
bymyself
e58fab92d1 Use responsive grid for templates dialog (#2791) 2025-03-01 17:08:41 -05:00
Chenlei Hu
09ab14ac81 [Type] Disallow type upcasting for node input spec (#2790) 2025-03-01 16:58:45 -05:00
filtered
bca0af82a3 [TS] Update type to reflect actual usage (#2788) 2025-03-01 16:44:33 -05:00
bymyself
9b8f9bd597 Allow passthrough to root component when creating dialog (#2787) 2025-03-01 16:43:32 -05:00
filtered
9b5fa95ae2 Update litegraph 0.8.100 (#2786) 2025-03-02 03:51:06 +11:00
Chenlei Hu
1e36b6ef22 [nit] Remove hardcoded grid style on body (#2785) 2025-03-01 11:18:47 -05:00
Chenlei Hu
2b212f9701 S&R improved filename sanitizing (#2784)
Co-authored-by: typpos <28550406+typpos@users.noreply.github.com>
2025-03-01 10:47:42 -05:00
Chenlei Hu
ba4bb5774e [CI] Update frontend install method (#2783) 2025-03-01 10:10:04 -05:00
filtered
b71a851a35 [TS] Use Litegraph strict narrowed type (#2781) 2025-03-01 21:52:34 +11:00
Chenlei Hu
8bcf9e8640 [Cleanup] Remove unused babel dependency (#2780) 2025-02-28 22:21:22 -05:00
Chenlei Hu
a814f9f902 [Test] Run unittest with vitest (#2779) 2025-02-28 22:09:17 -05:00
Chenlei Hu
7d92e453ef 1.11.6 (#2778) 2025-02-28 20:16:57 -05:00
Chenlei Hu
a244f295a6 Remove server elements from unit tests (#2777) 2025-02-28 20:01:40 -05:00
Chenlei Hu
3e54146afd [CI] Refactor test-ui gh action (#2776) 2025-02-28 19:46:57 -05:00
Chenlei Hu
3b051a11a4 [CI] Publish comfyui-frontend-package to pypi (#2774) 2025-02-28 18:22:42 -05:00
bymyself
792c5f2246 Load workflows from webm files (#2772)
Co-authored-by: github-actions <github-actions@github.com>
2025-02-28 16:00:02 -05:00
Chenlei Hu
96768bba97 1.11.5 (#2762) 2025-02-27 19:34:20 -05:00
Chenlei Hu
fc39ce9624 Rewrite/Test rounding logic of numeric widgets (#2758) 2025-02-27 17:52:16 -05:00
Chenlei Hu
cb4a5b88fc Remove magic 10% scale on numeric widget step (#2759) 2025-02-27 16:56:22 -05:00
Chenlei Hu
b685eba689 Update litegraph 0.8.99 (#2761) 2025-02-27 16:30:17 -05:00
bymyself
8775c1d930 Add video_upload to combo input schema (#2760) 2025-02-27 16:22:14 -05:00
Chenlei Hu
1dab413473 [Schema] Expose input options types (#2757) 2025-02-27 14:22:42 -05:00
Chenlei Hu
1d95d639e9 [Refactor] Extract nodeDefSchema from apiSchema (#2756) 2025-02-27 13:39:23 -05:00
bymyself
e380d792c7 Support models metadata in node properties (#2754) 2025-02-27 13:25:16 -05:00
bymyself
0910d485fd [Test] Fix flaky optional combo test (#2755) 2025-02-27 11:13:38 -07:00
Chenlei Hu
cdf42d5ad7 [Refactor] Move zod schemas to schemas/ folder (#2753) 2025-02-27 13:05:01 -05:00
Chenlei Hu
96f02dbf80 [Refactor] Use util.clone in mergeIfValid (#2752) 2025-02-27 11:47:56 -05:00
bymyself
f9157ee05f Update workflow schema to include node pack ID and version (#2751) 2025-02-27 11:15:31 -05:00
Terry Jia
cb6f2e4398 [3d] fix preview camera not sync up issue (#2747) 2025-02-27 08:42:07 -05:00
bymyself
71f3f720bf Lower floor on max history items setting (#2748) 2025-02-27 08:41:50 -05:00
Chenlei Hu
d1fead298f [Revert] Restrict applyToGraph to PrimitiveNode (#2746) 2025-02-26 22:29:13 -05:00
Chenlei Hu
0bc66965f0 [Cleanup] Remove LiteGraph global type declarations (#2745) 2025-02-26 21:50:41 -05:00
Terry Jia
e843f53799 [3d] temp fix preview camera not sync up (#2743) 2025-02-26 21:39:19 -05:00
Chenlei Hu
0259befcdd 1.11.4 (#2744) 2025-02-26 20:20:53 -05:00
Chenlei Hu
3662938080 Update litegraph 0.8.98 (#2742) 2025-02-26 20:10:10 -05:00
Chenlei Hu
ab9c65f28b Type widgetInputs (#2741) 2025-02-26 18:01:56 -05:00
Chenlei Hu
bdfa2efa50 [BugFix] Remove outputs.animated in queueStore (#2740) 2025-02-26 16:04:01 -05:00
Silver
1c408d2f6a replace colorSelect.png cursor (#2738) 2025-02-26 14:13:50 -05:00
bymyself
76818b54e6 Fix widget label extraction (#2737) 2025-02-26 13:44:05 -05:00
Chenlei Hu
2d41aed051 [BugFix] Properly update color button color on first selection (#2736) 2025-02-26 13:43:09 -05:00
bymyself
237b895e8b Add thumbnails for workflow templates (#2729)
Co-authored-by: github-actions <github-actions@github.com>
2025-02-26 10:39:27 -05:00
bymyself
e2087d2a7b Add node to default/tutorial workflow with link to getting started page of docs (#2734) 2025-02-26 10:35:40 -05:00
bymyself
74e8852958 Fix combo values from optional inputs not changed when refreshing (#2733) 2025-02-26 10:35:22 -05:00
Chenlei Hu
3c196f8f97 1.11.3 (#2731) 2025-02-25 21:40:17 -05:00
Chenlei Hu
6dbdde6491 Update litegraph 0.8.97 (#2730) 2025-02-25 21:40:07 -05:00
Chenlei Hu
a784abef0d Type INodeOutputSlot widget hack on PrimitiveNode (#2728) 2025-02-25 19:34:23 -05:00
filtered
c20ea0c523 [TS] Add null check in graph to prompt (#2727) 2025-02-25 13:39:46 -05:00
filtered
101e8dea11 [TS] Update type to match strict Litegraph (#2726) 2025-02-26 04:00:55 +11:00
Chenlei Hu
156013aa24 [Reland] Restrict applyToGraph to PrimitiveNode (#2724) 2025-02-25 10:54:21 -05:00
Comfy Org PR Bot
1a7145fbc9 Update locales for node definitions (#2723)
Co-authored-by: huchenlei <20929282+huchenlei@users.noreply.github.com>
2025-02-25 10:24:09 -05:00
Terry Jia
956b9609fd [3d] disable depth mode (#2720) 2025-02-25 10:05:10 -05:00
filtered
51c16a4f56 [TS] Add null check in paste handler (#2722) 2025-02-25 22:45:37 +11:00
filtered
417a089186 [Refactor] Use more explicit types in usePaste (#2721) 2025-02-25 21:52:47 +11:00
Chenlei Hu
f5cec41130 [i18n] Translate button widget labels (#2719)
Co-authored-by: github-actions <github-actions@github.com>
2025-02-24 20:31:14 -05:00
Chenlei Hu
d3dda14267 Update litegraph 0.8.95 (#2718) 2025-02-24 19:48:44 -05:00
282 changed files with 13213 additions and 29868 deletions

View File

@@ -11,18 +11,9 @@ DEV_SERVER_COMFYUI_URL=http://127.0.0.1:8188
# and public addresses.
VITE_REMOTE_DEV=false
# The target ComfyUI checkout directory to deploy the frontend code to.
# The dist directory will be copied to {DEPLOY_COMFYUI_DIR}/custom_web_versions/main/dev
# Add `--front-end-root {DEPLOY_COMFYUI_DIR}/custom_web_versions/main/dev`
# to ComfyUI launch script to serve the custom web version.
DEPLOY_COMFYUI_DIR=/home/ComfyUI/web
# The directory containing the ComfyUI installation used to run Playwright tests.
# If you aren't using a separate install for testing, point this to your regular install.
TEST_COMFYUI_DIR=/home/ComfyUI
# The directory containing the ComfyUI_examples repo used to extract test workflows.
EXAMPLE_REPO_PATH=tests-ui/ComfyUI_examples
# Whether to enable minification of the frontend code.
ENABLE_MINIFY=true

View File

@@ -13,7 +13,7 @@ jobs:
update-locales:
runs-on: ubuntu-latest
steps:
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.2
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.3
- name: Install Playwright Browsers
run: npx playwright install chromium --with-deps
working-directory: ComfyUI_frontend

View File

@@ -10,7 +10,7 @@ jobs:
if: github.event.pull_request.head.repo.full_name == github.repository
runs-on: ubuntu-latest
steps:
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.2
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.3
- name: Install Playwright Browsers
run: npx playwright install chromium --with-deps
working-directory: ComfyUI_frontend

View File

@@ -8,11 +8,13 @@ on:
- 'package.json'
jobs:
draft_release:
build:
runs-on: ubuntu-latest
if: >
github.event.pull_request.merged == true &&
contains(github.event.pull_request.labels.*.name, 'Release')
outputs:
version: ${{ steps.current_version.outputs.version }}
steps:
- name: Checkout code
uses: actions/checkout@v4
@@ -21,14 +23,33 @@ jobs:
node-version: 'lts/*'
- name: Get current version
id: current_version
run: echo ::set-output name=version::$(node -p "require('./package.json').version")
run: echo "version=$(node -p "require('./package.json').version")" >> $GITHUB_OUTPUT
- name: Build project
env:
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
run: |
npm ci
npm run fetch-templates
npm run build
npm run zipdist
- name: Upload dist artifact
uses: actions/upload-artifact@v4
with:
name: dist-files
path: |
dist/
dist.zip
draft_release:
needs: build
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Download dist artifact
uses: actions/download-artifact@v4
with:
name: dist-files
- name: Create release
id: create_release
uses: softprops/action-gh-release@v2
@@ -37,17 +58,47 @@ jobs:
with:
files: |
dist.zip
tag_name: v${{ steps.current_version.outputs.version }}
tag_name: v${{ needs.build.outputs.version }}
target_commitish: ${{ github.event.pull_request.base.ref }}
make_latest: ${{ github.event.pull_request.base.ref == 'main' }}
draft: true
prerelease: false
generate_release_notes: true
publish_types:
publish_pypi:
needs: build
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Download dist artifact
uses: actions/download-artifact@v4
with:
name: dist-files
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Install build dependencies
run: python -m pip install build
- name: Setup pypi package
run: |
mkdir -p comfyui_frontend_package/comfyui_frontend_package/static/
cp -r dist/* comfyui_frontend_package/comfyui_frontend_package/static/
- name: Build pypi package
run: python -m build
working-directory: comfyui_frontend_package
env:
COMFYUI_FRONTEND_VERSION: ${{ needs.build.outputs.version }}
- name: Publish pypi package
uses: pypa/gh-action-pypi-publish@release/v1
with:
password: ${{ secrets.PYPI_TOKEN }}
packages-dir: comfyui_frontend_package/dist
publish_types:
needs: build
runs-on: ubuntu-latest
if: >
github.event.pull_request.merged == true &&
contains(github.event.pull_request.labels.*.name, 'Release')
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4

View File

@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
if: github.event.label.name == 'New Browser Test Expectations'
steps:
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.2
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.3
- name: Install Playwright Browsers
run: npx playwright install chromium --with-deps
working-directory: ComfyUI_frontend

View File

@@ -2,76 +2,104 @@ name: Tests CI
on:
push:
branches: [ main, master, core/*, desktop/* ]
branches: [main, master, core/*, desktop/*]
pull_request:
branches: [ main, master, dev*, core/*, desktop/* ]
branches: [main, master, dev*, core/*, desktop/*]
jobs:
jest-tests:
setup:
runs-on: ubuntu-latest
outputs:
cache-key: ${{ steps.cache-key.outputs.key }}
steps:
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.2
with:
devtools_ref: 7b81139e904519db8e5481899ef36bbb4393cb6b
- name: Run Jest tests
run: |
npm run test:generate
npm run test:jest -- --verbose
working-directory: ComfyUI_frontend
- name: Checkout ComfyUI
uses: actions/checkout@v4
with:
repository: 'comfyanonymous/ComfyUI'
path: 'ComfyUI'
ref: master
playwright-tests-chromium:
runs-on: ubuntu-latest
steps:
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.2
with:
devtools_ref: 7b81139e904519db8e5481899ef36bbb4393cb6b
- name: Install Playwright Browsers
run: npx playwright install chromium --with-deps
working-directory: ComfyUI_frontend
- name: Run Playwright tests (Chromium)
run: npx playwright test --project=chromium
working-directory: ComfyUI_frontend
- uses: actions/upload-artifact@v4
if: always()
with:
name: playwright-report-chromium
path: ComfyUI_frontend/playwright-report/
retention-days: 30
- name: Checkout ComfyUI_frontend
uses: actions/checkout@v4
with:
repository: 'Comfy-Org/ComfyUI_frontend'
path: 'ComfyUI_frontend'
playwright-tests-chromium-2x:
runs-on: ubuntu-latest
steps:
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.2
with:
devtools_ref: 7b81139e904519db8e5481899ef36bbb4393cb6b
- name: Install Playwright Browsers
run: npx playwright install chromium --with-deps
working-directory: ComfyUI_frontend
- name: Run Playwright tests (Chromium 2x)
run: npx playwright test --project=chromium-2x
working-directory: ComfyUI_frontend
- uses: actions/upload-artifact@v4
if: always()
with:
name: playwright-report-chromium-2x
path: ComfyUI_frontend/playwright-report/
retention-days: 30
- name: Checkout ComfyUI_devtools
uses: actions/checkout@v4
with:
repository: 'Comfy-Org/ComfyUI_devtools'
path: 'ComfyUI/custom_nodes/ComfyUI_devtools'
ref: '080e6d4af809a46852d1c4b7ed85f06e8a3a72be'
playwright-tests-mobile-chrome:
- uses: actions/setup-node@v4
with:
node-version: lts/*
- name: Build ComfyUI_frontend
run: |
npm ci
npm run fetch-templates
npm run build
working-directory: ComfyUI_frontend
- name: Generate cache key
id: cache-key
run: echo "key=$(date +%s)" >> $GITHUB_OUTPUT
- name: Cache setup
uses: actions/cache@v3
with:
path: |
ComfyUI
ComfyUI_frontend
key: comfyui-setup-${{ steps.cache-key.outputs.key }}
playwright-tests:
needs: setup
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
browser: [chromium, chromium-2x, mobile-chrome]
steps:
- uses: Comfy-Org/ComfyUI_frontend_setup_action@v2.2
with:
devtools_ref: 7b81139e904519db8e5481899ef36bbb4393cb6b
- name: Install Playwright Browsers
run: npx playwright install chromium --with-deps
working-directory: ComfyUI_frontend
- name: Run Playwright tests (Mobile Chrome)
run: npx playwright test --project=mobile-chrome
working-directory: ComfyUI_frontend
- uses: actions/upload-artifact@v4
if: always()
with:
name: playwright-report-mobile-chrome
path: ComfyUI_frontend/playwright-report/
retention-days: 30
- name: Restore cached setup
uses: actions/cache@v3
with:
path: |
ComfyUI
ComfyUI_frontend
key: comfyui-setup-${{ needs.setup.outputs.cache-key }}
- uses: actions/setup-python@v4
with:
python-version: '3.10'
- name: Install requirements
run: |
python -m pip install --upgrade pip
pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu
pip install -r requirements.txt
pip install wait-for-it
working-directory: ComfyUI
- name: Start ComfyUI server
run: |
python main.py --cpu --multi-user --front-end-root ../ComfyUI_frontend/dist &
wait-for-it --service 127.0.0.1:8188 -t 600
working-directory: ComfyUI
- name: Install Playwright Browsers
run: npx playwright install chromium --with-deps
working-directory: ComfyUI_frontend
- name: Run Playwright tests (${{ matrix.browser }})
run: npx playwright test --project=${{ matrix.browser }}
working-directory: ComfyUI_frontend
- uses: actions/upload-artifact@v4
if: always()
with:
name: playwright-report-${{ matrix.browser }}
path: ComfyUI_frontend/playwright-report/
retention-days: 30

View File

@@ -0,0 +1,44 @@
name: Update Electron Types
on:
workflow_dispatch:
jobs:
update-electron-types:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: lts/*
cache: 'npm'
- name: Update electron types
run: npm install @comfyorg/comfyui-electron-types@latest
- name: Get new version
id: get-version
run: |
NEW_VERSION=$(node -e "console.log(JSON.parse(require('fs').readFileSync('./package-lock.json')).packages['node_modules/@comfyorg/comfyui-electron-types'].version)")
echo "NEW_VERSION=$NEW_VERSION" >> $GITHUB_OUTPUT
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: '[chore] Update electron-types to ${{ steps.get-version.outputs.NEW_VERSION }}'
title: '[chore] Update electron-types to ${{ steps.get-version.outputs.NEW_VERSION }}'
body: |
Automated update of desktop API types to version ${{ steps.get-version.outputs.NEW_VERSION }}.
branch: update-electron-types-${{ steps.get-version.outputs.NEW_VERSION }}
base: main
labels: |
dependencies
Electron

43
.github/workflows/update-litegraph.yaml vendored Normal file
View File

@@ -0,0 +1,43 @@
name: Update Litegraph Dependency
on:
workflow_dispatch:
jobs:
update-litegraph:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: lts/*
- name: Update litegraph
run: npm install @comfyorg/litegraph@latest
- name: Get new version
id: get-version
run: |
NEW_VERSION=$(node -e "console.log(JSON.parse(require('fs').readFileSync('./package-lock.json')).packages['node_modules/@comfyorg/litegraph'].version)")
echo "NEW_VERSION=$NEW_VERSION" >> $GITHUB_OUTPUT
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
with:
token: ${{ secrets.PR_GH_TOKEN }}
commit-message: '[chore] Update litegraph to ${{ steps.get-version.outputs.NEW_VERSION }}'
title: '[chore] Update litegraph to ${{ steps.get-version.outputs.NEW_VERSION }}'
body: |
Automated update of litegraph to version ${{ steps.get-version.outputs.NEW_VERSION }}.
Ref: https://github.com/Comfy-Org/litegraph.js/releases/tag/v${{ steps.get-version.outputs.NEW_VERSION }}
branch: update-litegraph-${{ steps.get-version.outputs.NEW_VERSION }}
base: main
labels: |
dependencies

View File

@@ -0,0 +1,97 @@
name: Update Comfy Registry API Types
on:
# Manual trigger
workflow_dispatch:
# Triggered from comfy-api repo
repository_dispatch:
types: [comfy-api-updated]
jobs:
update-registry-types:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: lts/*
cache: 'npm'
- name: Install dependencies
run: npm ci
- name: Checkout comfy-api repository
uses: actions/checkout@v4
with:
repository: Comfy-Org/comfy-api
path: comfy-api
token: ${{ secrets.COMFY_API_PAT }}
clean: true
- name: Get API commit information
id: api-info
run: |
cd comfy-api
API_COMMIT=$(git rev-parse --short HEAD)
echo "commit=${API_COMMIT}" >> $GITHUB_OUTPUT
cd ..
- name: Generate API types
run: |
echo "Generating TypeScript types from comfy-api@${{ steps.api-info.outputs.commit }}..."
npx openapi-typescript ./comfy-api/openapi.yml --output ./src/types/comfyRegistryTypes.ts
- name: Validate generated types
run: |
if [ ! -f ./src/types/comfyRegistryTypes.ts ]; then
echo "Error: Types file was not generated."
exit 1
fi
# Check if file is not empty
if [ ! -s ./src/types/comfyRegistryTypes.ts ]; then
echo "Error: Generated types file is empty."
exit 1
fi
- name: Check for changes
id: check-changes
run: |
if [[ -z $(git status --porcelain ./src/types/comfyRegistryTypes.ts) ]]; then
echo "No changes to Comfy Registry API types detected."
echo "changed=false" >> $GITHUB_OUTPUT
exit 0
else
echo "Changes detected in Comfy Registry API types."
echo "changed=true" >> $GITHUB_OUTPUT
fi
- name: Create Pull Request
if: steps.check-changes.outputs.changed == 'true'
uses: peter-evans/create-pull-request@v7
with:
token: ${{ secrets.PR_GH_TOKEN }}
commit-message: '[chore] Update Comfy Registry API types from comfy-api@${{ steps.api-info.outputs.commit }}'
title: '[chore] Update Comfy Registry API types from comfy-api@${{ steps.api-info.outputs.commit }}'
body: |
## Automated API Type Update
This PR updates the Comfy Registry API types from the latest comfy-api OpenAPI specification.
- API commit: ${{ steps.api-info.outputs.commit }}
- Generated on: ${{ github.event.repository.updated_at }}
These types are automatically generated using openapi-typescript.
branch: update-registry-types-${{ steps.api-info.outputs.commit }}
base: main
labels: CNR
delete-branch: true
add-paths: |
src/types/comfyRegistryTypes.ts

51
.github/workflows/version-bump.yaml vendored Normal file
View File

@@ -0,0 +1,51 @@
name: Version Bump
on:
workflow_dispatch:
inputs:
version_type:
description: 'Version increment type'
required: true
default: 'patch'
type: 'choice'
options:
- patch
- minor
- major
jobs:
bump-version:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: lts/*
cache: 'npm'
- name: Bump version
id: bump-version
run: |
npm version ${{ github.event.inputs.version_type }} --no-git-tag-version
NEW_VERSION=$(node -p "require('./package.json').version")
echo "NEW_VERSION=$NEW_VERSION" >> $GITHUB_OUTPUT
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
with:
token: ${{ secrets.PR_GH_TOKEN }}
commit-message: '[release] Bump version to ${{ steps.bump-version.outputs.NEW_VERSION }}'
title: '${{ steps.bump-version.outputs.NEW_VERSION }}'
body: |
Automated version bump to ${{ steps.bump-version.outputs.NEW_VERSION }}
branch: version-bump-${{ steps.bump-version.outputs.NEW_VERSION }}
base: main
labels: |
Release

View File

@@ -22,4 +22,6 @@ jobs:
run: npm ci
- name: Run Vitest tests
run: npm run test:component
run: |
npm run test:component
npm run test:unit

7
.gitignore vendored
View File

@@ -48,3 +48,10 @@ dist.zip
# Generated JSON Schemas
/schemas/
# Workflow templates assets
# Hosted on https://github.com/Comfy-Org/workflow_templates
/public/templates/
# Temporary repository directory
templates_repo/

View File

@@ -31,8 +31,22 @@
## Release Schedule
### Nightly Release
The project follows a structured release process for each minor version, consisting of three distinct phases:
1. **Development Phase** - 1 week
- Active development of new features
- Code changes merged to the development branch
2. **Feature Freeze** - 1 week
- No new features accepted
- Only bug fixes are cherry-picked to the release branch
- Testing and stabilization of the codebase
3. **Publication**
- Release is published at the end of the freeze period
- Version is finalized and made available to all users
### Nightly Releases
Nightly releases are published daily at [https://github.com/Comfy-Org/ComfyUI_frontend/releases](https://github.com/Comfy-Org/ComfyUI_frontend/releases).
To use the latest nightly release, add the following command line argument to your ComfyUI launch script:
@@ -41,18 +55,17 @@ To use the latest nightly release, add the following command line argument to yo
--front-end-version Comfy-Org/ComfyUI_frontend@latest
```
#### For Windows Stand-alone Build Users
## Overlapping Release Cycles
The development of successive minor versions overlaps. For example, while version 1.1 is in feature freeze, development for version 1.2 begins simultaneously.
Edit your `run_cpu.bat` or `run_nvidia_gpu.bat` file as follows:
### Example Release Cycle
```bat
.\python_embeded\python.exe -s ComfyUI\main.py --windows-standalone-build --front-end-version Comfy-Org/ComfyUI_frontend@latest
pause
```
### Stable Release
Stable releases are published bi-weekly in the ComfyUI main repository.
| Week | Date Range | Version 1.1 | Version 1.2 | Version 1.3 | Patch Releases |
|------|------------|-------------|-------------|-------------|----------------|
| 1 | Mar 1-7 | Development | - | - | - |
| 2 | Mar 8-14 | Feature Freeze | Development | - | 1.1.0 through 1.1.6 (daily) |
| 3 | Mar 15-21 | Released | Feature Freeze | Development | 1.1.7 through 1.1.13 (daily)<br>1.2.0 through 1.2.6 (daily) |
| 4 | Mar 22-28 | - | Released | Feature Freeze | 1.2.7 through 1.2.13 (daily)<br>1.3.0 through 1.3.6 (daily) |
## Release Summary
@@ -546,10 +559,8 @@ navigate to `http://<server_ip>:5173` (e.g. `http://192.168.2.20:5173` here), to
### Unit Test
- `git clone https://github.com/comfyanonymous/ComfyUI_examples.git` to `tests-ui/ComfyUI_examples` or the EXAMPLE_REPO_PATH location specified in .env
- `npm i` to install all dependencies
- `npm run test:generate` to fetch `tests-ui/data/object_info.json`
- `npm run test:jest` to execute all unit tests.
- `npm run test:unit` to execute all unit tests.
### Component Test
@@ -650,8 +661,3 @@ This will make the new language selectable in the application's settings.
Start the development server, switch to the new language, and verify the translations.
You can switch languages by opening the ComfyUI Settings and selecting from the `ComfyUI > Locale` dropdown box.
## Deploy
- Option 1: Set `DEPLOY_COMFYUI_DIR` in `.env` and run `npm run deploy`.
- Option 2: Copy everything under `dist/` to `ComfyUI/web/` in your ComfyUI checkout manually.

View File

@@ -1,8 +0,0 @@
{
"presets": [
"@babel/preset-env"
],
"plugins": [
"babel-plugin-transform-import-meta"
]
}

View File

@@ -1,7 +1,7 @@
import type { Response } from '@playwright/test'
import { expect, mergeTests } from '@playwright/test'
import type { StatusWsMessage } from '../src/types/apiTypes.ts'
import type { StatusWsMessage } from '../src/schemas/apiSchema.ts'
import { comfyPageFixture } from './fixtures/ComfyPage'
import { webSocketFixture } from './fixtures/ws.ts'

View File

@@ -0,0 +1,49 @@
{
"last_node_id": 1,
"last_link_id": 1,
"nodes": [
{
"id": 1,
"type": "CheckpointLoaderSimple",
"pos": [256, 256],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": null
},
{
"name": "CLIP",
"type": "CLIP",
"links": null
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple",
"models": [
{
"name": "fake_model.safetensors",
"url": "http://localhost:8188/api/devtools/fake_model.safetensors",
"directory": "text_encoders"
}
]
},
"widgets_values": ["fake_model.safetensors"]
}
],
"links": [],
"groups": [],
"config": {},
"extra": {},
"version": 0.4
}

View File

@@ -0,0 +1,55 @@
{
"last_node_id": 2,
"last_link_id": 0,
"nodes": [
{
"id": 1,
"type": "Note",
"pos": [
50, 50
],
"size": [
322.3645935058594,
167.91612243652344
],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"Foo\n123"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 2,
"type": "MarkdownNote",
"pos": [
50, 300
],
"size": [
320.9985656738281,
179.52735900878906
],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"# Bar\n123"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [],
"groups": [],
"config": {},
"extra": {},
"version": 0.4
}

View File

@@ -0,0 +1,37 @@
{
"last_node_id": 16,
"last_link_id": 17,
"nodes": [
{
"id": 16,
"type": "DevToolsNodeWithOptionalComboInput",
"pos": [1605, 480],
"size": [378, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "STRING",
"type": "STRING",
"links": null
}
],
"properties": {
"Node name for S&R": "DevToolsNodeWithOptionalComboInput"
},
"widgets_values": ["Random Unique Option 1740551583.3507228"]
}
],
"links": [],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 2.0875710456451313,
"offset": [-1311.5753953400676, -176.7620403697558]
}
},
"version": 0.4
}

Binary file not shown.

View File

@@ -1,6 +1,6 @@
import { expect } from '@playwright/test'
import type { Palette } from '../src/types/colorPaletteTypes'
import type { Palette } from '../src/schemas/colorPaletteSchema'
import { comfyPageFixture as test } from './fixtures/ComfyPage'
const customColorPalettes: Record<string, Palette> = {

View File

@@ -1,6 +1,6 @@
import { Locator, expect } from '@playwright/test'
import { Keybinding } from '../src/types/keyBindingTypes'
import type { Keybinding } from '../src/schemas/keyBindingSchema'
import { comfyPageFixture as test } from './fixtures/ComfyPage'
test.describe('Load workflow warning', () => {
@@ -78,6 +78,19 @@ test.describe('Missing models warning', () => {
await expect(downloadButton).toBeVisible()
})
test('Should display a warning when missing models are found in node properties', async ({
comfyPage
}) => {
// Load workflow that has a node with models metadata at the node level
await comfyPage.loadWorkflow('missing_models_from_node_properties')
const missingModelsWarning = comfyPage.page.locator('.comfy-missing-models')
await expect(missingModelsWarning).toBeVisible()
const downloadButton = missingModelsWarning.getByLabel('Download')
await expect(downloadButton).toBeVisible()
})
test('Should not display a warning when no missing models are found', async ({
comfyPage
}) => {
@@ -120,14 +133,6 @@ test.describe('Missing models warning', () => {
await expect(missingModelsWarning).not.toBeVisible()
})
test('should show on tutorial workflow', async ({ comfyPage }) => {
await comfyPage.setSetting('Comfy.TutorialCompleted', false)
await comfyPage.setup({ clearStorage: true })
const missingModelsWarning = comfyPage.page.locator('.comfy-missing-models')
await expect(missingModelsWarning).toBeVisible()
expect(await comfyPage.getSetting('Comfy.TutorialCompleted')).toBe(true)
})
// Flaky test after parallelization
// https://github.com/Comfy-Org/ComfyUI_frontend/pull/1400
test.skip('Should download missing model when clicking download button', async ({

View File

@@ -0,0 +1,97 @@
import type { Mouse } from '@playwright/test'
import type { ComfyPage } from './ComfyPage'
import type { Position } from './types'
/**
* Used for drag and drop ops
* @see
* - {@link Mouse.down}
* - {@link Mouse.move}
* - {@link Mouse.up}
*/
export interface DragOptions {
button?: 'left' | 'right' | 'middle'
clickCount?: number
steps?: number
}
/**
* Wraps mouse drag and drop to work with a canvas-based app.
*
* Requires the next frame animated before and after all steps, giving the
* canvas time to render the changes before screenshots are taken.
*/
export class ComfyMouse implements Omit<Mouse, 'move'> {
static defaultSteps = 5
static defaultOptions: DragOptions = { steps: ComfyMouse.defaultSteps }
constructor(readonly comfyPage: ComfyPage) {}
/** The normal Playwright {@link Mouse} property from {@link ComfyPage.page}. */
get mouse() {
return this.comfyPage.page.mouse
}
async nextFrame() {
await this.comfyPage.nextFrame()
}
/** Drags from current location to a new location and hovers there (no pointerup event) */
async drag(to: Position, options = ComfyMouse.defaultOptions) {
const { steps, ...downOptions } = options
await this.mouse.down(downOptions)
await this.nextFrame()
await this.move(to, { steps })
await this.nextFrame()
}
async drop(options = ComfyMouse.defaultOptions) {
await this.mouse.up(options)
await this.nextFrame()
}
async dragAndDrop(
from: Position,
to: Position,
options = ComfyMouse.defaultOptions
) {
const { steps } = options
await this.nextFrame()
await this.move(from, { steps })
await this.drag(to, options)
await this.drop(options)
}
/** @see {@link Mouse.move} */
async move(to: Position, options = ComfyMouse.defaultOptions) {
await this.mouse.move(to.x, to.y, options)
await this.nextFrame()
}
//#region Pass-through
async click(...args: Parameters<Mouse['click']>) {
return await this.mouse.click(...args)
}
async dblclick(...args: Parameters<Mouse['dblclick']>) {
return await this.mouse.dblclick(...args)
}
async down(...args: Parameters<Mouse['down']>) {
return await this.mouse.down(...args)
}
async up(...args: Parameters<Mouse['up']>) {
return await this.mouse.up(...args)
}
async wheel(...args: Parameters<Mouse['wheel']>) {
return await this.mouse.wheel(...args)
}
//#endregion Pass-through
}

View File

@@ -5,12 +5,13 @@ import { test as base } from '@playwright/test'
import dotenv from 'dotenv'
import * as fs from 'fs'
import type { NodeId } from '../../src/schemas/comfyWorkflowSchema'
import type { KeyCombo } from '../../src/schemas/keyBindingSchema'
import type { useWorkspaceStore } from '../../src/stores/workspaceStore'
import type { NodeId } from '../../src/types/comfyWorkflow'
import type { KeyCombo } from '../../src/types/keyBindingTypes'
import { NodeBadgeMode } from '../../src/types/nodeSource'
import { ComfyActionbar } from '../helpers/actionbar'
import { ComfyTemplates } from '../helpers/templates'
import { ComfyMouse } from './ComfyMouse'
import { ComfyNodeSearchBox } from './components/ComfyNodeSearchBox'
import { SettingDialog } from './components/SettingDialog'
import {
@@ -468,6 +469,7 @@ export class ComfyPage {
const getFileType = (fileName: string) => {
if (fileName.endsWith('.png')) return 'image/png'
if (fileName.endsWith('.webp')) return 'image/webp'
if (fileName.endsWith('.webm')) return 'video/webm'
if (fileName.endsWith('.json')) return 'application/json'
return 'application/octet-stream'
}
@@ -909,7 +911,10 @@ export class ComfyPage {
}
}
export const comfyPageFixture = base.extend<{ comfyPage: ComfyPage }>({
export const comfyPageFixture = base.extend<{
comfyPage: ComfyPage
comfyMouse: ComfyMouse
}>({
comfyPage: async ({ page, request }, use, testInfo) => {
const comfyPage = new ComfyPage(page, request)
@@ -940,6 +945,10 @@ export const comfyPageFixture = base.extend<{ comfyPage: ComfyPage }>({
await comfyPage.setup()
await use(comfyPage)
},
comfyMouse: async ({ comfyPage }, use) => {
const comfyMouse = new ComfyMouse(comfyPage)
use(comfyMouse)
}
})

View File

@@ -8,7 +8,7 @@ import type {
HistoryTaskItem,
TaskItem,
TaskOutput
} from '../../../src/types/apiTypes'
} from '../../../src/schemas/apiSchema'
import type { ComfyPage } from '../ComfyPage'
/** keyof TaskOutput[string] */

View File

@@ -1,4 +1,10 @@
import { Locator, Page } from '@playwright/test'
import path from 'path'
import {
TemplateInfo,
WorkflowTemplates
} from '../../src/types/workflowTemplateTypes'
export class ComfyTemplates {
readonly content: Locator
@@ -8,6 +14,20 @@ export class ComfyTemplates {
}
async loadTemplate(id: string) {
await this.content.getByTestId(`template-workflow-${id}`).click()
await this.content
.getByTestId(`template-workflow-${id}`)
.getByRole('img')
.click()
}
async getAllTemplates(): Promise<TemplateInfo[]> {
const templates: WorkflowTemplates[] = await this.page.evaluate(() =>
window['app'].api.getCoreWorkflowTemplates()
)
return templates.flatMap((t) => t.templates)
}
getTemplatePath(filename: string): string {
return path.join('public', 'templates', filename)
}
}

View File

@@ -140,20 +140,14 @@ test.describe('Node Interaction', () => {
})
test('Auto snap&highlight when dragging link over node', async ({
comfyPage
comfyPage,
comfyMouse
}) => {
await comfyPage.setSetting('Comfy.Node.AutoSnapLinkToSlot', true)
await comfyPage.setSetting('Comfy.Node.SnapHighlightsNode', true)
await comfyPage.page.mouse.move(
comfyPage.clipTextEncodeNode1InputSlot.x,
comfyPage.clipTextEncodeNode1InputSlot.y
)
await comfyPage.page.mouse.down()
await comfyPage.page.mouse.move(
comfyPage.clipTextEncodeNode2InputSlot.x,
comfyPage.clipTextEncodeNode2InputSlot.y
)
await comfyMouse.move(comfyPage.clipTextEncodeNode1InputSlot)
await comfyMouse.drag(comfyPage.clipTextEncodeNode2InputSlot)
await expect(comfyPage.canvas).toHaveScreenshot('snapped-highlighted.png')
})
})
@@ -506,24 +500,24 @@ test.describe('Canvas Interaction', () => {
expect(await getCursorStyle()).toBe('default')
})
test('Can pan when dragging a link', async ({ comfyPage }) => {
test('Can pan when dragging a link', async ({ comfyPage, comfyMouse }) => {
const posSlot1 = comfyPage.clipTextEncodeNode1InputSlot
await comfyPage.page.mouse.move(posSlot1.x, posSlot1.y)
await comfyPage.page.mouse.down()
await comfyMouse.move(posSlot1)
const posEmpty = comfyPage.emptySpace
await comfyPage.page.mouse.move(posEmpty.x, posEmpty.y)
await comfyMouse.drag(posEmpty)
await expect(comfyPage.canvas).toHaveScreenshot('dragging-link1.png')
await comfyPage.page.keyboard.down('Space')
await comfyPage.page.mouse.move(posEmpty.x + 100, posEmpty.y + 100)
await comfyMouse.mouse.move(posEmpty.x + 100, posEmpty.y + 100)
// Canvas should be panned.
await expect(comfyPage.canvas).toHaveScreenshot(
'panning-when-dragging-link.png'
)
await comfyPage.page.keyboard.up('Space')
await comfyPage.page.mouse.move(posEmpty.x, posEmpty.y)
await comfyMouse.move(posEmpty)
// Should be back to dragging link mode when space is released.
await expect(comfyPage.canvas).toHaveScreenshot('dragging-link2.png')
await comfyPage.page.mouse.up()
await comfyMouse.drop()
})
test('Can pan very far and back', async ({ comfyPage }) => {

View File

@@ -7,7 +7,8 @@ test.describe('Load Workflow in Media', () => {
'workflow.webp',
'edited_workflow.webp',
'no_workflow.webp',
'large_workflow.webp'
'large_workflow.webp',
'workflow.webm'
].forEach(async (fileName) => {
test(`Load workflow in ${fileName}`, async ({ comfyPage }) => {
await comfyPage.dragAndDropFile(fileName)

Binary file not shown.

After

Width:  |  Height:  |  Size: 49 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 48 KiB

View File

@@ -0,0 +1,10 @@
import { expect } from '@playwright/test'
import { comfyPageFixture as test } from './fixtures/ComfyPage'
test.describe('Note Node', () => {
test('Can load node nodes', async ({ comfyPage }) => {
await comfyPage.loadWorkflow('note_nodes')
await expect(comfyPage.canvas).toHaveScreenshot('note_nodes.png')
})
})

Binary file not shown.

After

Width:  |  Height:  |  Size: 43 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 43 KiB

View File

@@ -4,25 +4,19 @@ import { NodeBadgeMode } from '../src/types/nodeSource'
import { comfyPageFixture as test } from './fixtures/ComfyPage'
test.describe('Canvas Right Click Menu', () => {
// See https://github.com/comfyanonymous/ComfyUI/issues/3883
// Right-click menu on canvas's option sequence is not stable.
test.skip('Can add node', async ({ comfyPage }) => {
test('Can add node', async ({ comfyPage }) => {
await comfyPage.rightClickCanvas()
await expect(comfyPage.canvas).toHaveScreenshot('right-click-menu.png')
await comfyPage.page.getByText('Add Node').click()
await comfyPage.nextFrame()
await expect(comfyPage.canvas).toHaveScreenshot('add-node-menu.png')
await comfyPage.page.getByText('loaders').click()
await comfyPage.nextFrame()
await expect(comfyPage.canvas).toHaveScreenshot('add-node-menu-loaders.png')
await comfyPage.page.getByText('Load VAE').click()
await comfyPage.nextFrame()
await expect(comfyPage.canvas).toHaveScreenshot('add-node-node-added.png')
})
// See https://github.com/comfyanonymous/ComfyUI/issues/3883
// Right-click menu on canvas's option sequence is not stable.
test.skip('Can add group', async ({ comfyPage }) => {
test('Can add group', async ({ comfyPage }) => {
await comfyPage.rightClickCanvas()
await expect(comfyPage.canvas).toHaveScreenshot('right-click-menu.png')
await comfyPage.page.getByText('Add Group', { exact: true }).click()

Binary file not shown.

After

Width:  |  Height:  |  Size: 102 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 98 KiB

After

Width:  |  Height:  |  Size: 99 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 117 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 127 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 104 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 100 KiB

After

Width:  |  Height:  |  Size: 101 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 109 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 107 KiB

After

Width:  |  Height:  |  Size: 107 KiB

View File

@@ -4,6 +4,9 @@ import { comfyPageFixture } from './fixtures/ComfyPage'
const test = comfyPageFixture
const BLUE_COLOR = 'rgb(51, 51, 85)'
const RED_COLOR = 'rgb(85, 51, 51)'
test.describe('Selection Toolbox', () => {
test.beforeEach(async ({ comfyPage }) => {
await comfyPage.setSetting('Comfy.Canvas.SelectionToolbox', true)
@@ -150,10 +153,7 @@ test.describe('Selection Toolbox', () => {
await redColorOption.click()
// Button should now show the selected color
await expect(colorPickerButton).toHaveCSS(
'color',
'rgb(85, 51, 51)' // Red color, adjust if different
)
await expect(colorPickerButton).toHaveCSS('color', RED_COLOR)
})
test('color picker shows mixed state for differently colored selections', async ({
@@ -183,5 +183,28 @@ test.describe('Selection Toolbox', () => {
)
await expect(colorPickerButton).not.toHaveAttribute('color')
})
test('color picker shows correct color when selecting pre-colored node', async ({
comfyPage
}) => {
// First color a node
await comfyPage.selectNodes(['KSampler'])
await comfyPage.page.locator('.selection-toolbox .pi-circle-fill').click()
await comfyPage.page
.locator('.color-picker-container i[data-testid="blue"]')
.click()
// Clear selection
await comfyPage.selectNodes(['KSampler'])
// Re-select the node
await comfyPage.selectNodes(['KSampler'])
// Color picker button should show the correct color
const colorPickerButton = comfyPage.page.locator(
'.selection-toolbox .pi-circle-fill'
)
await expect(colorPickerButton).toHaveCSS('color', BLUE_COLOR)
})
})
})

View File

@@ -1,15 +1,60 @@
import { expect } from '@playwright/test'
import fs from 'fs'
import { comfyPageFixture as test } from './fixtures/ComfyPage'
test.describe('Templates', () => {
test.beforeEach(async ({ comfyPage }) => {
await comfyPage.setSetting('Comfy.UseNewMenu', 'Top')
await comfyPage.setSetting('Comfy.Workflow.ShowMissingModelsWarning', false)
})
test('should have a JSON workflow file for each template', async ({
comfyPage
}) => {
const templates = await comfyPage.templates.getAllTemplates()
for (const template of templates) {
const workflowPath = comfyPage.templates.getTemplatePath(
`${template.name}.json`
)
expect(
fs.existsSync(workflowPath),
`Missing workflow: ${template.name}`
).toBe(true)
}
})
test('should have all required thumbnail media for each template', async ({
comfyPage
}) => {
const templates = await comfyPage.templates.getAllTemplates()
for (const template of templates) {
const { name, mediaSubtype, thumbnailVariant } = template
const baseMedia = `${name}-1.${mediaSubtype}`
const basePath = comfyPage.templates.getTemplatePath(baseMedia)
// Check base thumbnail
expect(
fs.existsSync(basePath),
`Missing base thumbnail: ${baseMedia}`
).toBe(true)
// Check second thumbnail for variants that need it
if (
thumbnailVariant === 'compareSlider' ||
thumbnailVariant === 'hoverDissolve'
) {
const secondMedia = `${name}-2.${mediaSubtype}`
const secondPath = comfyPage.templates.getTemplatePath(secondMedia)
expect(
fs.existsSync(secondPath),
`Missing second thumbnail: ${secondMedia} required for ${thumbnailVariant}`
).toBe(true)
}
}
})
test('Can load template workflows', async ({ comfyPage }) => {
// This test will need expanding on once the templates are decided
// Clear the workflow
await comfyPage.menu.workflowsTab.open()
await comfyPage.menu.workflowsTab.newBlankWorkflowButton.click()
@@ -28,4 +73,17 @@ test.describe('Templates', () => {
expect(await comfyPage.getGraphNodesCount()).toBeGreaterThan(0)
}).toPass({ timeout: 250 })
})
test('dialog should be automatically shown to first-time users', async ({
comfyPage
}) => {
// Set the tutorial as not completed to mark the user as a first-time user
await comfyPage.setSetting('Comfy.TutorialCompleted', false)
// Load the page
await comfyPage.setup({ clearStorage: true })
// Expect the templates dialog to be shown
expect(await comfyPage.templates.content.isVisible()).toBe(true)
})
})

View File

@@ -26,6 +26,33 @@ test.describe('Combo text widget', () => {
await comfyPage.resizeLoadCheckpointNode(0.8, 1, true)
await expect(comfyPage.canvas).toHaveScreenshot('resized-to-original.png')
})
test('should refresh combo values of optional inputs', async ({
comfyPage
}) => {
const getComboValues = async () =>
comfyPage.page.evaluate(() => {
return window['app'].graph.nodes
.find((node) => node.title === 'Node With Optional Combo Input')
.widgets.find((widget) => widget.name === 'optional_combo_input')
.options.values
})
await comfyPage.loadWorkflow('optional_combo_input')
const initialComboValues = await getComboValues()
// Focus canvas
await comfyPage.page.mouse.click(400, 300)
// Press R to trigger refresh
await comfyPage.page.keyboard.press('r')
// Wait for nodes' widgets to be updated
await comfyPage.nextFrame()
const refreshedComboValues = await getComboValues()
expect(refreshedComboValues).not.toEqual(initialComboValues)
})
})
test.describe('Boolean widget', () => {

4
comfyui_frontend_package/.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
comfyui_frontend_package/static/*
comfyui_frontend_package.egg-info/*
__pycache__/

View File

@@ -0,0 +1 @@
recursive-include comfyui_frontend_package/static *

View File

@@ -0,0 +1,13 @@
# comfyui_frontend pypi package
This is the pypi package structure for the comfyui frontend.
During build process, the compiled assets are copied into the `${PROJECT_ROOT}/comfyui_frontend_package/comfyui_frontend_package/static` directory.
The package can be installed with the following command:
```bash
pip install comfyui-frontend-package
```
Ref: <https://pypi.org/project/comfyui-frontend-package/>

View File

@@ -0,0 +1,11 @@
import os
from setuptools import setup, find_packages
setup(
name="comfyui_frontend_package",
version=os.getenv("COMFYUI_FRONTEND_VERSION") or "0.1.0",
packages=find_packages(),
include_package_data=True,
install_requires=[],
python_requires=">=3.9",
)

View File

@@ -1,27 +0,0 @@
import type { JestConfigWithTsJest } from 'ts-jest'
const jestConfig: JestConfigWithTsJest = {
testMatch: ['**/tests-ui/**/*.test.ts'],
testEnvironment: 'jsdom',
moduleFileExtensions: ['js', 'jsx', 'json', 'vue', 'ts', 'tsx'],
transform: {
'^.+\\.vue$': '@vue/vue3-jest',
'^.+\\.m?[tj]sx?$': [
'ts-jest',
{
tsconfig: './tsconfig.json',
babelConfig: './babel.config.json'
}
]
},
transformIgnorePatterns: ['/node_modules/(?!(three|@three)/)'],
moduleNameMapper: {
'^@/(.*)$': '<rootDir>/src/$1',
'\\.(css|less|scss|sass)$': 'identity-obj-proxy'
},
clearMocks: true,
resetModules: true,
setupFiles: ['./tests-ui/tests/globalSetup.ts']
}
export default jestConfig

5820
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
{
"name": "@comfyorg/comfyui-frontend",
"private": true,
"version": "1.11.2",
"version": "1.12.3",
"type": "module",
"repository": "https://github.com/Comfy-Org/ComfyUI_frontend",
"homepage": "https://comfy.org",
@@ -12,16 +12,12 @@
"dev:electron": "vite --config vite.electron.config.mts",
"build": "npm run typecheck && vite build",
"build:types": "vite build --config vite.types.config.mts && node scripts/prepare-types.js",
"deploy": "npm run build && node scripts/deploy.js",
"release": "node scripts/release.js",
"update-litegraph": "node scripts/update-litegraph.js",
"zipdist": "node scripts/zipdist.js",
"typecheck": "vue-tsc --noEmit && tsc --noEmit && tsc-strict",
"format": "prettier --write './**/*.{js,ts,tsx,vue,mts}'",
"format:check": "prettier --check './**/*.{js,ts,tsx,vue,mts}'",
"test:jest": "jest --config jest.config.ts",
"test:generate": "npx tsx tests-ui/setup",
"test:browser": "npx playwright test",
"test:unit": "vitest run tests-ui/tests",
"test:component": "vitest run src/components/",
"prepare": "husky || true",
"preview": "vite preview",
@@ -29,27 +25,23 @@
"lint:fix": "eslint src --fix",
"locale": "lobe-i18n locale",
"collect-i18n": "playwright test --config=playwright.i18n.config.ts",
"json-schema": "tsx scripts/generate-json-schema.ts"
"json-schema": "tsx scripts/generate-json-schema.ts",
"fetch-templates": "tsx scripts/fetch-templates.ts"
},
"devDependencies": {
"@babel/core": "^7.24.7",
"@babel/preset-env": "^7.22.20",
"@eslint/js": "^9.8.0",
"@iconify/json": "^2.2.245",
"@lobehub/i18n-cli": "^1.20.0",
"@pinia/testing": "^0.1.5",
"@playwright/test": "^1.44.1",
"@trivago/prettier-plugin-sort-imports": "^5.2.0",
"@types/jest": "^29.5.12",
"@types/fs-extra": "^11.0.4",
"@types/lodash": "^4.17.6",
"@types/node": "^20.14.8",
"@types/three": "^0.169.0",
"@vitejs/plugin-vue": "^5.1.4",
"@vue/test-utils": "^2.4.6",
"@vue/vue3-jest": "^29.2.6",
"autoprefixer": "^10.4.19",
"babel-plugin-transform-import-meta": "^2.2.1",
"babel-plugin-transform-rename-import": "^2.3.0",
"chalk": "^5.3.0",
"eslint": "^9.12.0",
"eslint-plugin-unused-imports": "^4.1.4",
@@ -59,14 +51,10 @@
"happy-dom": "^15.11.0",
"husky": "^9.0.11",
"identity-obj-proxy": "^3.0.0",
"jest": "^29.7.0",
"jest-environment-jsdom": "^29.7.0",
"lint-staged": "^15.2.7",
"postcss": "^8.4.39",
"prettier": "^3.3.2",
"tailwindcss": "^3.4.4",
"ts-jest": "^29.1.4",
"ts-node": "^10.9.2",
"tsx": "^4.15.6",
"typescript": "^5.4.5",
"typescript-eslint": "^8.0.0",
@@ -82,9 +70,10 @@
"zod-to-json-schema": "^3.24.1"
},
"dependencies": {
"@alloc/quick-lru": "^5.2.0",
"@atlaskit/pragmatic-drag-and-drop": "^1.3.1",
"@comfyorg/comfyui-electron-types": "^0.4.20",
"@comfyorg/litegraph": "^0.8.94",
"@comfyorg/litegraph": "^0.9.6",
"@primevue/forms": "^4.2.5",
"@primevue/themes": "^4.2.5",
"@sentry/vue": "^8.48.0",

View File

@@ -7,7 +7,7 @@ const config: PlaywrightTestConfig = {
headless: true
},
reporter: 'list',
timeout: 10000,
timeout: 60000,
testMatch: /collect-i18n-.*\.ts/
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 373 B

After

Width:  |  Height:  |  Size: 756 B

View File

@@ -1,673 +0,0 @@
{
"last_node_id": 40,
"last_link_id": 38,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 14
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "ControlNetApply",
"pos": [340, 267],
"size": [317.4, 98],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 10
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 11
},
{
"name": "image",
"type": "IMAGE",
"link": 12
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [1]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1280, 704, 1]
},
{
"id": 17,
"type": "VAEDecode",
"pos": [2527, 369],
"size": [210, 46],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 18
},
{
"name": "vae",
"type": "VAE",
"link": 20
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [19],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [-8, 607],
"size": [425.28, 180.61],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 35
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 38
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 13
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 16],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1071823866653712,
"randomize",
10,
8,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 35,
"type": "CLIPTextEncode",
"pos": [1310, -72],
"size": [425.28, 180.61],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 33
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [28],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [-223, -93],
"size": [422.85, 164.31],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 36
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo) girl (flat chest:0.9), (fennec ears:1.0)\u00a0 (fox ears:1.0), (messy hair), eyes, standing (school uniform sweater) sky clouds nature national park beautiful winter snow (scenery HDR landscape)\n(sunset)\n"
]
},
{
"id": 12,
"type": "LoadImage",
"pos": [-280, 287],
"size": [365, 314],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [12],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["pose_worship.png", "image"]
},
{
"id": 13,
"type": "VAELoader",
"pos": [1098, 599],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [14, 20],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["kl-f8-anime2.ckpt"]
},
{
"id": 15,
"type": "CLIPTextEncode",
"pos": [1321, -395],
"size": [400, 200],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 32
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [21],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"girl (flat chest:0.9), (fennec ears:0.8)\u00a0 (fox ears:0.8), (messy hair), (highlights), (realistic starry eyes pupil:1.1), standing (school uniform sweater)\nsky clouds nature national park beautiful winter snow scenery HDR landscape\n\n(sunset)\n\n"
]
},
{
"id": 36,
"type": "CheckpointLoaderSimple",
"pos": [570, -206],
"size": [315, 98],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [29],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [34],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["AOM3A3.safetensors"]
},
{
"id": 37,
"type": "CLIPSetLastLayer",
"pos": [933, -183],
"size": [315, 58],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 34,
"slot_index": 0
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [32, 33],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 38,
"type": "CLIPSetLastLayer",
"pos": [-733, 375],
"size": [315, 58],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 37
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [35, 36],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 39,
"type": "CheckpointLoaderSimple",
"pos": [-1100, 302],
"size": [315, 98],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [38],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [37],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 14,
"type": "LatentUpscale",
"pos": [1486, 494],
"size": [315, 130],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 16
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [17],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1920, 1088, "disabled"]
},
{
"id": 18,
"type": "SaveImage",
"pos": [2769, 370],
"size": [357.86, 262.24],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 19
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 16,
"type": "KSampler",
"pos": [2011, 248],
"size": [315, 262],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 29
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 21
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 28
},
{
"name": "latent_image",
"type": "LATENT",
"link": 17
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [18],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
284006177305237,
"randomize",
8,
5,
"dpmpp_sde",
"simple",
0.52
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [353.07, 252.57],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 11,
"type": "ControlNetLoader",
"pos": [-250, 151],
"size": [450.9, 58],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [11],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["control_v11p_sd15_openpose_fp16.safetensors"]
},
{
"id": 40,
"type": "MarkdownNote",
"pos": [-1095, 480],
"size": [225, 60],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#2-pass-pose-worship)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[10, 6, 0, 10, 0, "CONDITIONING"],
[11, 11, 0, 10, 1, "CONTROL_NET"],
[12, 12, 0, 10, 2, "IMAGE"],
[13, 10, 0, 3, 1, "CONDITIONING"],
[14, 13, 0, 8, 1, "VAE"],
[16, 3, 0, 14, 0, "LATENT"],
[17, 14, 0, 16, 3, "LATENT"],
[18, 16, 0, 17, 0, "LATENT"],
[19, 17, 0, 18, 0, "IMAGE"],
[20, 13, 0, 17, 1, "VAE"],
[21, 15, 0, 16, 1, "CONDITIONING"],
[28, 35, 0, 16, 2, "CONDITIONING"],
[29, 36, 0, 16, 0, "MODEL"],
[32, 37, 0, 15, 0, "CLIP"],
[33, 37, 0, 35, 0, "CLIP"],
[34, 36, 1, 37, 0, "CLIP"],
[35, 38, 0, 7, 0, "CLIP"],
[36, 38, 0, 6, 0, "CLIP"],
[37, 39, 1, 38, 0, "CLIP"],
[38, 39, 0, 3, 0, "MODEL"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.88,
"offset": [1252.62, 517.93]
}
},
"version": 0.4,
"models": [
{
"name": "control_v11p_sd15_openpose_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_openpose_fp16.safetensors",
"directory": "controlnet"
}
]
}

View File

@@ -1,966 +0,0 @@
{
"last_node_id": 48,
"last_link_id": 113,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1320, 302],
"size": [210, 46],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 36
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1575, 350],
"size": [210, 250],
"flags": {},
"order": 26,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 49
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [175, 496],
"size": [425.28, 180.61],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 100
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 12,
"type": "ConditioningCombine",
"pos": [834, -246],
"size": [342.6, 46],
"flags": {
"collapsed": false
},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 63
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 57
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [58],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 20,
"type": "VAELoader",
"pos": [1041, 544],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [36, 51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["vae-ft-mse-840000-ema-pruned.safetensors"]
},
{
"id": 35,
"type": "ConditioningCombine",
"pos": [873, -705],
"size": [342.6, 46],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 61
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [63],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [695, 531],
"size": [315, 106],
"flags": {
"collapsed": false
},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [704, 1280, 1]
},
{
"id": 22,
"type": "LatentUpscale",
"pos": [1412, 79],
"size": [315, 130],
"flags": {},
"order": 25,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 41
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1088, 1920, "disabled"]
},
{
"id": 14,
"type": "CLIPTextEncode",
"pos": [-4, -994],
"size": [400, 200],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 104
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [95],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) (night:1.3) (darkness) sky (black) (stars:1.2) (galaxy:1.2) (space) (universe)"
]
},
{
"id": 18,
"type": "ConditioningSetArea",
"pos": [482, -709],
"size": [312, 154],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 96
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [62],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 320, 1]
},
{
"id": 31,
"type": "VAEDecode",
"pos": [2783.3, -41],
"size": [210, 46],
"flags": {},
"order": 28,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 50
},
{
"name": "vae",
"type": "VAE",
"link": 51
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [87],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 32,
"type": "SaveImage",
"pos": [3012.3, -42],
"size": [315, 250],
"flags": {},
"order": 29,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 87
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 24,
"type": "KSampler",
"pos": [2421.3, -389],
"size": [315, 262],
"flags": {},
"order": 27,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 108
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 46
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 47
},
{
"name": "latent_image",
"type": "LATENT",
"link": 42
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [50],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1122440447966177,
"randomize",
14,
7,
"dpmpp_2m",
"simple",
0.5
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [152, 265],
"size": [422.85, 164.31],
"flags": {
"collapsed": false
},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 99
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [93],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(masterpiece) (best quality) beautiful landscape breathtaking amazing view nature photograph forest mountains ocean (sky) national park scenery"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [885, 136],
"size": [315, 262],
"flags": {},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 107
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 54
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 41],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
335608130539327,
"randomize",
13,
8.5,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 11,
"type": "ConditioningSetArea",
"pos": [479, -454],
"size": [314, 154],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 97,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 512, 1]
},
{
"id": 19,
"type": "ConditioningCombine",
"pos": [1180, -151],
"size": [342.6, 46],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 58
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 94
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [54],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 10,
"type": "ConditioningCombine",
"pos": [803, -149],
"size": [342.6, 46],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 40
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 93
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [94],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 15,
"type": "ConditioningSetArea",
"pos": [466, -233],
"size": [299, 154],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 98
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 704, 1]
},
{
"id": 13,
"type": "CLIPTextEncode",
"pos": [-5, -729],
"size": [400, 200],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 103
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [96],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) (evening:1.2) (sky:1.2) (clouds) (colorful) (HDR:1.2) (sunset:1.3)\n"
]
},
{
"id": 17,
"type": "CLIPTextEncode",
"pos": [11, -455],
"size": [400, 200],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 102
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [97],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["(best quality) (daytime:1.2) sky (blue)\n"]
},
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [16, -217],
"size": [400, 200],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 101
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [98],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["(masterpiece) (best quality) morning sky\n\n"]
},
{
"id": 34,
"type": "ConditioningSetArea",
"pos": [476, -932],
"size": [312, 154],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 95,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [61],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 0, 1.2]
},
{
"id": 44,
"type": "CLIPSetLastLayer",
"pos": [-363, 453],
"size": [315, 58],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 106
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [99, 100, 101, 102, 103, 104],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 45,
"type": "CheckpointLoaderSimple",
"pos": [-849, 429],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [107],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [106],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 27,
"type": "CLIPTextEncode",
"pos": [1969.3, -336],
"size": [400, 200],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 113
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [47],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 26,
"type": "CLIPTextEncode",
"pos": [1965, -580],
"size": [400, 200],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 112
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [46],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) beautiful (HDR:1.2) (realistic:1.2) landscape breathtaking amazing view nature scenery photograph forest mountains ocean daytime night evening morning, (sky:1.2)\n"
]
},
{
"id": 47,
"type": "CLIPSetLastLayer",
"pos": [1569, -403],
"size": [315, 58],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 111
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [112, 113],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 46,
"type": "CheckpointLoaderSimple",
"pos": [1217, -496],
"size": [315, 98],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [108],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [111],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["AbyssOrangeMix2_hard.safetensors"]
},
{
"id": 48,
"type": "MarkdownNote",
"pos": [-840, 585],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/area_composition/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[36, 20, 0, 8, 1, "VAE"],
[40, 15, 0, 10, 0, "CONDITIONING"],
[41, 3, 0, 22, 0, "LATENT"],
[42, 22, 0, 24, 3, "LATENT"],
[46, 26, 0, 24, 1, "CONDITIONING"],
[47, 27, 0, 24, 2, "CONDITIONING"],
[49, 8, 0, 9, 0, "IMAGE"],
[50, 24, 0, 31, 0, "LATENT"],
[51, 20, 0, 31, 1, "VAE"],
[54, 19, 0, 3, 1, "CONDITIONING"],
[57, 11, 0, 12, 1, "CONDITIONING"],
[58, 12, 0, 19, 0, "CONDITIONING"],
[61, 34, 0, 35, 0, "CONDITIONING"],
[62, 18, 0, 35, 1, "CONDITIONING"],
[63, 35, 0, 12, 0, "CONDITIONING"],
[87, 31, 0, 32, 0, "IMAGE"],
[93, 6, 0, 10, 1, "CONDITIONING"],
[94, 10, 0, 19, 1, "CONDITIONING"],
[95, 14, 0, 34, 0, "CONDITIONING"],
[96, 13, 0, 18, 0, "CONDITIONING"],
[97, 17, 0, 11, 0, "CONDITIONING"],
[98, 33, 0, 15, 0, "CONDITIONING"],
[99, 44, 0, 6, 0, "CLIP"],
[100, 44, 0, 7, 0, "CLIP"],
[101, 44, 0, 33, 0, "CLIP"],
[102, 44, 0, 17, 0, "CLIP"],
[103, 44, 0, 13, 0, "CLIP"],
[104, 44, 0, 14, 0, "CLIP"],
[106, 45, 1, 44, 0, "CLIP"],
[107, 45, 0, 3, 0, "MODEL"],
[108, 46, 0, 24, 0, "MODEL"],
[111, 46, 1, 47, 0, "CLIP"],
[112, 47, 0, 26, 0, "CLIP"],
[113, 47, 0, 27, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.44,
"offset": [1558.38, 1652.18]
}
},
"version": 0.4,
"models": [
{
"name": "vae-ft-mse-840000-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors?download=true",
"directory": "vae"
}
]
}

View File

@@ -1,967 +0,0 @@
{
"last_node_id": 48,
"last_link_id": 114,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1320, 302],
"size": [210, 46],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 36
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1575, 350],
"size": [210, 58],
"flags": {},
"order": 26,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 49
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [175, 496],
"size": [425.28, 180.61],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 106
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 12,
"type": "ConditioningCombine",
"pos": [834, -246],
"size": [342.6, 46],
"flags": {
"collapsed": false
},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 63
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 57
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [58],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 20,
"type": "VAELoader",
"pos": [1041, 544],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [36, 51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["vae-ft-mse-840000-ema-pruned.safetensors"]
},
{
"id": 35,
"type": "ConditioningCombine",
"pos": [873, -705],
"size": [342.6, 46],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 61
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [63],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [695, 531],
"size": [315, 106],
"flags": {
"collapsed": false
},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [704, 1280, 1]
},
{
"id": 22,
"type": "LatentUpscale",
"pos": [1412, 79],
"size": [315, 130],
"flags": {},
"order": 25,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 41
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1088, 1920, "disabled"]
},
{
"id": 14,
"type": "CLIPTextEncode",
"pos": [-4, -994],
"size": [400, 200],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 110
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [89],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) (night:1.3) (darkness) sky (black) (stars:1.2) (galaxy:1.2) (space) (universe)"
]
},
{
"id": 13,
"type": "CLIPTextEncode",
"pos": [-5, -729],
"size": [400, 200],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 109
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [91],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) (evening:1.2) (sky:1.2) (clouds) (colorful) (HDR:1.2) (sunset:1.3)\n"
]
},
{
"id": 17,
"type": "CLIPTextEncode",
"pos": [11, -455],
"size": [400, 200],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 108
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [90],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["(best quality) (daytime:1.2) sky (blue)\n"]
},
{
"id": 18,
"type": "ConditioningSetArea",
"pos": [482, -709],
"size": [312, 154],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 90
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [62],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 320, 1]
},
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [16, -217],
"size": [400, 200],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 107
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [92],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["(masterpiece) (best quality) morning sky\n\n"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [152, 265],
"size": [422.85, 164.31],
"flags": {
"collapsed": false
},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 105
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [93],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(masterpiece) (best quality) beautiful landscape breathtaking amazing view nature photograph forest mountains ocean (sky) national park scenery"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [885, 136],
"size": [315, 262],
"flags": {},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 104
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 54
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 41],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
823155751257884,
"randomize",
13,
8.5,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 11,
"type": "ConditioningSetArea",
"pos": [479, -454],
"size": [314, 154],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 91,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 512, 1]
},
{
"id": 19,
"type": "ConditioningCombine",
"pos": [1180, -151],
"size": [342.6, 46],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 58
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 94
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [54],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 10,
"type": "ConditioningCombine",
"pos": [803, -149],
"size": [342.6, 46],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 40
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 93
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [94],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 34,
"type": "ConditioningSetArea",
"pos": [476, -932],
"size": [312, 154],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 92,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [61],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 0, 1]
},
{
"id": 15,
"type": "ConditioningSetArea",
"pos": [466, -233],
"size": [299, 154],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 89
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 704, 1.5]
},
{
"id": 44,
"type": "CheckpointLoaderSimple",
"pos": [-703, 444],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [104],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [111],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 46,
"type": "CLIPSetLastLayer",
"pos": [-354, 244],
"size": [315, 58],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 111,
"slot_index": 0
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [105, 106, 107, 108, 109, 110],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 24,
"type": "KSampler",
"pos": [2220, -398],
"size": [315, 262],
"flags": {},
"order": 27,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 95
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 46
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 47
},
{
"name": "latent_image",
"type": "LATENT",
"link": 42
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [50],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
418330692116968,
"randomize",
14,
7,
"dpmpp_2m",
"simple",
0.5
]
},
{
"id": 32,
"type": "SaveImage",
"pos": [2825, -62],
"size": [315, 58],
"flags": {},
"order": 29,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 87
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 31,
"type": "VAEDecode",
"pos": [2590, -61],
"size": [210, 46],
"flags": {},
"order": 28,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 50
},
{
"name": "vae",
"type": "VAE",
"link": 51
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [87],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 26,
"type": "CLIPTextEncode",
"pos": [1781, -571],
"size": [400, 200],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 113
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [46],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) beautiful (HDR:1.2) (realistic:1.2) landscape breathtaking amazing view nature scenery photograph forest mountains ocean daytime night evening morning, (sky:1.2)\n"
]
},
{
"id": 27,
"type": "CLIPTextEncode",
"pos": [1787, -317],
"size": [400, 200],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 114
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [47],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 47,
"type": "CLIPSetLastLayer",
"pos": [1407, -402],
"size": [315, 58],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 112
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [113, 114],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 45,
"type": "CheckpointLoaderSimple",
"pos": [1074, -444],
"size": [315, 98],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [95],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [112],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["AbyssOrangeMix2_hard.safetensors"]
},
{
"id": 48,
"type": "MarkdownNote",
"pos": [-690, 615],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/area_composition/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[36, 20, 0, 8, 1, "VAE"],
[40, 15, 0, 10, 0, "CONDITIONING"],
[41, 3, 0, 22, 0, "LATENT"],
[42, 22, 0, 24, 3, "LATENT"],
[46, 26, 0, 24, 1, "CONDITIONING"],
[47, 27, 0, 24, 2, "CONDITIONING"],
[49, 8, 0, 9, 0, "IMAGE"],
[50, 24, 0, 31, 0, "LATENT"],
[51, 20, 0, 31, 1, "VAE"],
[54, 19, 0, 3, 1, "CONDITIONING"],
[57, 11, 0, 12, 1, "CONDITIONING"],
[58, 12, 0, 19, 0, "CONDITIONING"],
[61, 34, 0, 35, 0, "CONDITIONING"],
[62, 18, 0, 35, 1, "CONDITIONING"],
[63, 35, 0, 12, 0, "CONDITIONING"],
[87, 31, 0, 32, 0, "IMAGE"],
[89, 14, 0, 15, 0, "CONDITIONING"],
[90, 17, 0, 18, 0, "CONDITIONING"],
[91, 13, 0, 11, 0, "CONDITIONING"],
[92, 33, 0, 34, 0, "CONDITIONING"],
[93, 6, 0, 10, 1, "CONDITIONING"],
[94, 10, 0, 19, 1, "CONDITIONING"],
[95, 45, 0, 24, 0, "MODEL"],
[104, 44, 0, 3, 0, "MODEL"],
[105, 46, 0, 6, 0, "CLIP"],
[106, 46, 0, 7, 0, "CLIP"],
[107, 46, 0, 33, 0, "CLIP"],
[108, 46, 0, 17, 0, "CLIP"],
[109, 46, 0, 13, 0, "CLIP"],
[110, 46, 0, 14, 0, "CLIP"],
[111, 44, 1, 46, 0, "CLIP"],
[112, 45, 1, 47, 0, "CLIP"],
[113, 47, 0, 26, 0, "CLIP"],
[114, 47, 0, 27, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.79,
"offset": [1022.96, -230.7]
}
},
"version": 0.4,
"models": [
{
"name": "vae-ft-mse-840000-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors?download=true",
"directory": "vae"
}
]
}

View File

@@ -1,620 +0,0 @@
{
"last_node_id": 50,
"last_link_id": 108,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1320, 302],
"size": [210, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 36
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [175, 496],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 101
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 27,
"type": "CLIPTextEncode",
"pos": [1570, -336],
"size": [400, 200],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 103
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [47],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 22,
"type": "LatentUpscale",
"pos": [1412, 79],
"size": [315, 130],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 41
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1920, 1088, "disabled"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [695, 531],
"size": [315, 106],
"flags": {
"collapsed": false
},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1280, 704, 1]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1556, 303],
"size": [210, 250],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 49
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [156, 269],
"size": [422.85, 164.31],
"flags": {
"collapsed": false
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 102
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [98],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo:1.3) (best quality) (HDR:1.0) colourful, nature wilderness snow mountain peak, (winter:1.2), on landscape mountain in Switzerland alps sunset, aerial view (cityscape:1.3) skyscrapers modern city satellite view, (sunset)\ngirl with fennec ears fox ears, sweater, sitting\n"
]
},
{
"id": 47,
"type": "ConditioningCombine",
"pos": [530, 71],
"size": [342.6, 46],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 97
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 98
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [99],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 45,
"type": "CLIPTextEncode",
"pos": [-88, -224],
"size": [400, 200],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 105,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [93],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo:1.3) (best quality) (HDR:1.0) girl colourful of (flat chest:0.9), (fennec ears:1.0)\u00a0 (fox ears:1.0), blonde twintails medium (messy hair:1.2), (eyes:1.0), sweater, (pink:0.8) , long sleeves, sweatpants (pants), gloves, nature wilderness (sitting:1.3) on snow mountain peak, (:d:0.5) (blush:0.9), (winter:1.2), on landscape mountain in Switzerland alps sunset, comfortable, (spread legs:1.1), aerial view (cityscape:1.3) skyscrapers modern city satellite view, (sunset)"
]
},
{
"id": 31,
"type": "VAEDecode",
"pos": [2419, 10],
"size": [210, 46],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 50
},
{
"name": "vae",
"type": "VAE",
"link": 51
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 46,
"type": "ConditioningSetArea",
"pos": [344, -227],
"size": [317.4, 154],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 93
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [97],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [640, 640, 0, 64, 1]
},
{
"id": 26,
"type": "CLIPTextEncode",
"pos": [1573, -583],
"size": [400, 200],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 104
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [46],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece solo (realistic) (best quality) (HDR:1.0) girl colourful of (flat chest:0.9), (fox ears:0.9), blonde twintails messy hair, (eyes:1.0), sweater, (pink:0.8) , long sleeves, sweatpants pants, gloves, nature wilderness sitting on snow mountain peak aerial view, (:d:0.5) (blush:0.9), (winter:0.9), mountain in Switzerland, comfortable, aerial view (cityscape:1.2) skyscrapers modern city satellite view, (sunset)\n"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [885, 136],
"size": [315, 262],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 106
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 99
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 41],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
830459492315490,
"randomize",
13,
7,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 32,
"type": "SaveImage",
"pos": [2648, -11],
"size": [210, 250],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 100
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 24,
"type": "KSampler",
"pos": [2047, -270],
"size": [315, 262],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 107
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 46
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 47
},
{
"name": "latent_image",
"type": "LATENT",
"link": 42
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [50],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
626842672818096,
"randomize",
7,
5,
"dpmpp_sde",
"simple",
0.52
]
},
{
"id": 20,
"type": "VAELoader",
"pos": [1086, 563],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [36, 51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["vae-ft-mse-840000-ema-pruned.safetensors"]
},
{
"id": 49,
"type": "CLIPSetLastLayer",
"pos": [-227, 630],
"size": [315, 58],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 108
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [101, 102, 103, 104, 105],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 48,
"type": "CheckpointLoaderSimple",
"pos": [-621, 603],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [106, 107],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [108],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 50,
"type": "MarkdownNote",
"pos": [-615, 765],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/area_composition/#increasing-consistency-of-images-with-area-composition)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[36, 20, 0, 8, 1, "VAE"],
[41, 3, 0, 22, 0, "LATENT"],
[42, 22, 0, 24, 3, "LATENT"],
[46, 26, 0, 24, 1, "CONDITIONING"],
[47, 27, 0, 24, 2, "CONDITIONING"],
[49, 8, 0, 9, 0, "IMAGE"],
[50, 24, 0, 31, 0, "LATENT"],
[51, 20, 0, 31, 1, "VAE"],
[93, 45, 0, 46, 0, "CONDITIONING"],
[97, 46, 0, 47, 0, "CONDITIONING"],
[98, 6, 0, 47, 1, "CONDITIONING"],
[99, 47, 0, 3, 1, "CONDITIONING"],
[100, 31, 0, 32, 0, "IMAGE"],
[101, 49, 0, 7, 0, "CLIP"],
[102, 49, 0, 6, 0, "CLIP"],
[103, 49, 0, 27, 0, "CLIP"],
[104, 49, 0, 26, 0, "CLIP"],
[105, 49, 0, 45, 0, "CLIP"],
[106, 48, 0, 3, 0, "MODEL"],
[107, 48, 0, 24, 0, "MODEL"],
[108, 48, 1, 49, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.53,
"offset": [1214.17, 1188.8]
}
},
"version": 0.4,
"models": [
{
"name": "vae-ft-mse-840000-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors?download=true",
"directory": "vae"
}
]
}

View File

@@ -1,385 +0,0 @@
{
"last_node_id": 15,
"last_link_id": 21,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1210, 250],
"size": [210, 46],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 14
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [-42, -147],
"size": [422.85, 164.31],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 21
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo) girl (flat chest:0.9), (fennec ears:1.1)\u00a0 (fox ears:1.1), (blonde hair:1.0), messy hair, sky clouds, standing in a grass field, (chibi), blue eyes"
]
},
{
"id": 12,
"type": "ControlNetLoader",
"pos": [-50, 69],
"size": [422, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["control_v11p_sd15_scribble_fp16.safetensors"]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [355, 213],
"size": [425.28, 180.61],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [16],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis)"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [439, 446],
"size": [315, 106],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 13,
"type": "VAELoader",
"pos": [833, 484],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [14],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["vae-ft-mse-840000-ema-pruned.safetensors"]
},
{
"id": 10,
"type": "ControlNetApply",
"pos": [459, 51],
"size": [317.4, 98],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 10
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 13
},
{
"name": "image",
"type": "IMAGE",
"link": 12
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [18],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [0.9]
},
{
"id": 11,
"type": "LoadImage",
"pos": [-70, 177],
"size": [387.97, 465.51],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [12],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["input_scribble_example.png", "image"]
},
{
"id": 14,
"type": "CheckpointLoaderSimple",
"pos": [-448, 231],
"size": [315, 98],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [19],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [20, 21],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1453, 247],
"size": [393.62, 449.16],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [842, 150],
"size": [315, 262],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 19
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 18
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 16
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1002496614778823,
"randomize",
16,
6,
"uni_pc",
"normal",
1
]
},
{
"id": 15,
"type": "MarkdownNote",
"pos": [-450, 375],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[10, 6, 0, 10, 0, "CONDITIONING"],
[12, 11, 0, 10, 2, "IMAGE"],
[13, 12, 0, 10, 1, "CONTROL_NET"],
[14, 13, 0, 8, 1, "VAE"],
[16, 7, 0, 3, 2, "CONDITIONING"],
[18, 10, 0, 3, 1, "CONDITIONING"],
[19, 14, 0, 3, 0, "MODEL"],
[20, 14, 1, 7, 0, "CLIP"],
[21, 14, 1, 6, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.8,
"offset": [843.77, 555.93]
}
},
"version": 0.4,
"models": [
{
"name": "control_v11p_sd15_scribble_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_scribble_fp16.safetensors?download=true",
"directory": "controlnet"
},
{
"name": "vae-ft-mse-840000-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors?download=true",
"directory": "vae"
}
]
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 20 KiB

View File

@@ -1,356 +0,0 @@
{
"last_node_id": 9,
"last_link_id": 9,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [
413,
389
],
"size": [
425.27801513671875,
180.6060791015625
],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
6
],
"slot_index": 0
}
],
"properties": {},
"widgets_values": [
"text, watermark"
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [
415,
186
],
"size": [
422.84503173828125,
164.31304931640625
],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
4
],
"slot_index": 0
}
],
"properties": {},
"widgets_values": [
"beautiful scenery nature glass bottle landscape, , purple galaxy bottle,"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [
473,
609
],
"size": [
315,
106
],
"flags": {},
"order": 1,
"mode": 0,
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
2
],
"slot_index": 0
}
],
"properties": {},
"widgets_values": [
512,
512,
1
]
},
{
"id": 3,
"type": "KSampler",
"pos": [
863,
186
],
"size": [
315,
262
],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 1
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
7
],
"slot_index": 0
}
],
"properties": {},
"widgets_values": [
156680208700286,
true,
20,
8,
"euler",
"normal",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [
1209,
188
],
"size": [
210,
46
],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
9
],
"slot_index": 0
}
],
"properties": {}
},
{
"id": 9,
"type": "SaveImage",
"pos": [
1451,
189
],
"size": [
210,
26
],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"properties": {}
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [
26,
474
],
"size": [
315,
98
],
"flags": {},
"order": 0,
"mode": 0,
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
1
],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [
3,
5
],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [
8
],
"slot_index": 2
}
],
"properties": {},
"widgets_values": [
"v1-5-pruned-emaonly-fp16.safetensors"
]
}
],
"links": [
[
1,
4,
0,
3,
0,
"MODEL"
],
[
2,
5,
0,
3,
3,
"LATENT"
],
[
3,
4,
1,
6,
0,
"CLIP"
],
[
4,
6,
0,
3,
1,
"CONDITIONING"
],
[
5,
4,
1,
7,
0,
"CLIP"
],
[
6,
7,
0,
3,
2,
"CONDITIONING"
],
[
7,
3,
0,
8,
0,
"LATENT"
],
[
8,
4,
2,
8,
1,
"VAE"
],
[
9,
8,
0,
9,
0,
"IMAGE"
]
],
"groups": [],
"config": {},
"extra": {},
"version": 0.4,
"models": [{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
}]
}

View File

@@ -1,378 +0,0 @@
{
"last_node_id": 35,
"last_link_id": 52,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1053, 172],
"size": [210, 46],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 51
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 23,
"type": "ControlNetApply",
"pos": [593.6, -388.0],
"size": [317.4, 98],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 42
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 47
},
{
"name": "image",
"type": "IMAGE",
"link": 34
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [1.0]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [259, 463],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [832, 384, 1]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [169, 212],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 49
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2)"
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1310, 169],
"size": [516.05, 301.24],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 33,
"type": "DiffControlNetLoader",
"pos": [131, -338],
"size": [421.93, 58],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 48
}
],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [47],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DiffControlNetLoader"
},
"widgets_values": ["diff_control_sd15_depth_fp16.safetensors"]
},
{
"id": 24,
"type": "CLIPTextEncode",
"pos": [-305, -435],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 50
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["underwater photograph shark\n\n\n\n"]
},
{
"id": 3,
"type": "KSampler",
"pos": [699, 167],
"size": [315, 262],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 52
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
891858402356003,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 20,
"type": "LoadImage",
"pos": [135, -234],
"size": [429.73, 314],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [34],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["shark_depthmap.png", "image"]
},
{
"id": 34,
"type": "CheckpointLoaderSimple",
"pos": [-281, 110],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [48, 52],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [49, 50],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [51],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 35,
"type": "MarkdownNote",
"pos": [-270, 255],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#t2i-adapter-vs-controlnets)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[34, 20, 0, 23, 2, "IMAGE"],
[40, 23, 0, 3, 1, "CONDITIONING"],
[42, 24, 0, 23, 0, "CONDITIONING"],
[47, 33, 0, 23, 1, "CONTROL_NET"],
[48, 34, 0, 33, 0, "MODEL"],
[49, 34, 1, 7, 0, "CLIP"],
[50, 34, 1, 24, 0, "CLIP"],
[51, 34, 2, 8, 1, "VAE"],
[52, 34, 0, 3, 0, "MODEL"]
],
"groups": [
{
"id": 1,
"title": "Apply Depth ControlNet",
"bounding": [210, -480, 739, 336],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.86,
"offset": [671.97, 711.84]
}
},
"version": 0.4,
"models": [
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "diff_control_sd15_depth_fp16.safetensors",
"url": "https://huggingface.co/kohya-ss/ControlNet-diff-modules/resolve/main/diff_control_sd15_depth_fp16.safetensors?download=true",
"directory": "controlnet"
}
]
}

View File

@@ -1,371 +0,0 @@
{
"last_node_id": 34,
"last_link_id": 49,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1053, 172],
"size": [210, 46],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 49
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [259, 463],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [832, 384, 1]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [169, 212],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 47
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2)"
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1310, 169],
"size": [516.05, 301.24],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [699, 167],
"size": [315, 262],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 46
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
891858402356003,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 23,
"type": "ControlNetApply",
"pos": [553, -289],
"size": [317.4, 98],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 42
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 44
},
{
"name": "image",
"type": "IMAGE",
"link": 34
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [1.0]
},
{
"id": 31,
"type": "ControlNetLoader",
"pos": [168, -286],
"size": [345, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [44],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["t2iadapter_depth_sd14v1.pth"]
},
{
"id": 20,
"type": "LoadImage",
"pos": [88, -174],
"size": [413, 314],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [34],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["shark_depthmap.png", "image"]
},
{
"id": 33,
"type": "CheckpointLoaderSimple",
"pos": [-349, 161],
"size": [315, 98],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [46],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [47, 48],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [49],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 24,
"type": "CLIPTextEncode",
"pos": [-360, -261],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 48
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["underwater photograph shark\n\n\n\n"]
},
{
"id": 34,
"type": "MarkdownNote",
"pos": [-345, 300],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#t2i-adapter-vs-controlnets)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[34, 20, 0, 23, 2, "IMAGE"],
[40, 23, 0, 3, 1, "CONDITIONING"],
[42, 24, 0, 23, 0, "CONDITIONING"],
[44, 31, 0, 23, 1, "CONTROL_NET"],
[46, 33, 0, 3, 0, "MODEL"],
[47, 33, 1, 7, 0, "CLIP"],
[48, 33, 1, 24, 0, "CLIP"],
[49, 33, 2, 8, 1, "VAE"]
],
"groups": [
{
"id": 1,
"title": "Apply Depth T2I-Adapter",
"bounding": [150, -375, 739, 336],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.84,
"offset": [737.68, 680.26]
}
},
"version": 0.4,
"models": [
{
"name": "t2iadapter_depth_sd14v1.pth",
"url": "https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_depth_sd14v1.pth?download=true",
"directory": "controlnet"
},
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,267 +0,0 @@
{
"last_node_id": 10,
"last_link_id": 9,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"photograph in the style of embedding:SDA768.pt girl with blonde hair\nlandscape scenery view"
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [26, 474],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [1],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v2-1_768-ema-pruned.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [469, 528],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 1
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
193694018275622,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 10,
"type": "MarkdownNote",
"pos": [30, 630],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/textual_inversion_embeddings/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[1, 4, 0, 3, 0, "MODEL"],
[2, 5, 0, 3, 3, "LATENT"],
[3, 4, 1, 6, 0, "CLIP"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[5, 4, 1, 7, 0, "CLIP"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[9, 8, 0, 9, 0, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.84,
"offset": [498.31, 149.5]
}
},
"version": 0.4,
"models": [
{
"name": "v2-1_768-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,329 +0,0 @@
{
"last_node_id": 15,
"last_link_id": 19,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 18
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 17
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece best quality girl standing in victorian clothing"
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 19
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [15],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1791, 169],
"size": [455.99, 553.09],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 16
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 14,
"type": "ImageUpscaleWithModel",
"pos": [1506, 151],
"size": [241.8, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "upscale_model",
"type": "UPSCALE_MODEL",
"link": 14
},
{
"name": "image",
"type": "IMAGE",
"link": 15
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [16],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageUpscaleWithModel"
},
"widgets_values": []
},
{
"id": 13,
"type": "UpscaleModelLoader",
"pos": [1128, 51],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "UPSCALE_MODEL",
"type": "UPSCALE_MODEL",
"links": [14],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UpscaleModelLoader"
},
"widgets_values": ["RealESRGAN_x4plus.pth"]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 1
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
833543590226030,
"randomize",
20,
8,
"euler",
"normal",
1
]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-11, 307],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [1],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [17, 18],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [19],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 15,
"type": "MarkdownNote",
"pos": [0, 465],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/upscale_models/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[1, 4, 0, 3, 0, "MODEL"],
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[14, 13, 0, 14, 0, "UPSCALE_MODEL"],
[15, 8, 0, 14, 1, "IMAGE"],
[16, 14, 0, 9, 0, "IMAGE"],
[17, 4, 1, 6, 0, "CLIP"],
[18, 4, 1, 7, 0, "CLIP"],
[19, 4, 2, 8, 1, "VAE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.82,
"offset": [400.67, 431.06]
}
},
"version": 0.4,
"models": [
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "RealESRGAN_x4plus.pth",
"url": "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.0/RealESRGAN_x4plus.pth",
"directory": "upscale_models"
}
]
}

View File

@@ -1,474 +0,0 @@
{
"last_node_id": 36,
"last_link_id": 70,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [307, 282],
"size": [425.28, 180.61],
"flags": {
"collapsed": true
},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 63
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [68],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 17,
"type": "LoadImage",
"pos": [220, 530],
"size": [315, 314.0],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [49],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sd3_controlnet_example.png", "image"]
},
{
"id": 19,
"type": "PreviewImage",
"pos": [899, 532],
"size": [571.59, 625.53],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 26
}
],
"outputs": [],
"properties": {
"Node name for S&R": "PreviewImage"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [1290, 40],
"size": [315, 262],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 57
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 64
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 65
},
{
"name": "latent_image",
"type": "LATENT",
"link": 66
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
50363905047731,
"randomize",
20,
1,
"euler",
"normal",
1
]
},
{
"id": 35,
"type": "InstructPixToPixConditioning",
"pos": [1040, 50],
"size": [235.2, 86],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 67
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 68
},
{
"name": "vae",
"type": "VAE",
"link": 69
},
{
"name": "pixels",
"type": "IMAGE",
"link": 70
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [64],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [65],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [66],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "InstructPixToPixConditioning"
},
"widgets_values": []
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1620, 40],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 60
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1850, 40],
"size": [828.95, 893.85],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 32,
"type": "VAELoader",
"pos": [1290, 350],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [60, 69],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [700, 50],
"size": [317.4, 58],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [67],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [30]
},
{
"id": 23,
"type": "CLIPTextEncode",
"pos": [260, 50],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"cute anime girl with massive fluffy fennec ears and a big fluffy tail blonde messy long hair blue eyes wearing a pink sweater and jeans"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 34,
"type": "DualCLIPLoader",
"pos": [-80, 110],
"size": [315, 106],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [62, 63]
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp16.safetensors",
"flux",
"default"
]
},
{
"id": 31,
"type": "UNETLoader",
"pos": [710, -80],
"size": [315, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-canny-dev.safetensors", "default"]
},
{
"id": 18,
"type": "Canny",
"pos": [560, 530],
"size": [315, 82],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 49
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [26, 70],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "Canny"
},
"widgets_values": [0.15, 0.3]
},
{
"id": 36,
"type": "MarkdownNote",
"pos": [-75, 270],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#canny-and-depth)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[26, 18, 0, 19, 0, "IMAGE"],
[41, 23, 0, 26, 0, "CONDITIONING"],
[49, 17, 0, 18, 0, "IMAGE"],
[57, 31, 0, 3, 0, "MODEL"],
[60, 32, 0, 8, 1, "VAE"],
[62, 34, 0, 23, 0, "CLIP"],
[63, 34, 0, 7, 0, "CLIP"],
[64, 35, 0, 3, 1, "CONDITIONING"],
[65, 35, 1, 3, 2, "CONDITIONING"],
[66, 35, 2, 3, 3, "LATENT"],
[67, 26, 0, 35, 0, "CONDITIONING"],
[68, 7, 0, 35, 1, "CONDITIONING"],
[69, 32, 0, 35, 2, "VAE"],
[70, 18, 0, 35, 3, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.67,
"offset": [553.16, 455.34]
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
},
{
"name": "flux1-canny-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Canny-dev/resolve/main/flux1-canny-dev.safetensors?download=true",
"directory": "diffusion_models"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
}
]
}

View File

@@ -1,454 +0,0 @@
{
"last_node_id": 40,
"last_link_id": 76,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1620, 98],
"size": [210, 46],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 60
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [307, 282],
"size": [425.28, 180.61],
"flags": {
"collapsed": true
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 63
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [68],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 34,
"type": "DualCLIPLoader",
"pos": [-238, 112],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [62, 63]
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp16.safetensors",
"flux",
"default"
]
},
{
"id": 17,
"type": "LoadImage",
"pos": [307, 342],
"size": [315, 314.0],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [71],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["shark_depthmap.png", "image"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [621, 8],
"size": [317.4, 58],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [67],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [10]
},
{
"id": 35,
"type": "InstructPixToPixConditioning",
"pos": [1018, 124],
"size": [235.2, 86],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 67
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 68
},
{
"name": "vae",
"type": "VAE",
"link": 69
},
{
"name": "pixels",
"type": "IMAGE",
"link": 71
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [64],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [65],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [73],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "InstructPixToPixConditioning"
},
"widgets_values": []
},
{
"id": 32,
"type": "VAELoader",
"pos": [656, 165],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [60, 69],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1865, 98],
"size": [722.41, 425.77],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 37,
"type": "LoraLoaderModelOnly",
"pos": [624, -172],
"size": [315, 82],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 74
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [76],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LoraLoaderModelOnly"
},
"widgets_values": ["flux1-depth-dev-lora.safetensors", 1]
},
{
"id": 23,
"type": "CLIPTextEncode",
"pos": [115, -17],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["a photograph of a shark in the sea"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 3,
"type": "KSampler",
"pos": [1280, 100],
"size": [315, 262],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 76
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 64
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 65
},
{
"name": "latent_image",
"type": "LATENT",
"link": 73
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
91050358797301,
"randomize",
20,
1,
"euler",
"normal",
1
]
},
{
"id": 31,
"type": "UNETLoader",
"pos": [249, -171],
"size": [315, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [74],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-depth-dev.safetensors", "default"]
},
{
"id": 40,
"type": "MarkdownNote",
"pos": [-225, 270],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#canny-and-depth)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[41, 23, 0, 26, 0, "CONDITIONING"],
[60, 32, 0, 8, 1, "VAE"],
[62, 34, 0, 23, 0, "CLIP"],
[63, 34, 0, 7, 0, "CLIP"],
[64, 35, 0, 3, 1, "CONDITIONING"],
[65, 35, 1, 3, 2, "CONDITIONING"],
[67, 26, 0, 35, 0, "CONDITIONING"],
[68, 7, 0, 35, 1, "CONDITIONING"],
[69, 32, 0, 35, 2, "VAE"],
[71, 17, 0, 35, 3, "IMAGE"],
[73, 35, 2, 3, 3, "LATENT"],
[74, 31, 0, 37, 0, "MODEL"],
[76, 37, 0, 3, 0, "MODEL"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.65,
"offset": [724.57, 776.23]
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
},
{
"name": "flux1-depth-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Depth-dev/resolve/main/flux1-depth-dev.safetensors?download=true",
"directory": "diffusion_models"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "flux1-depth-dev-lora.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Depth-dev-lora/resolve/main/flux1-depth-dev-lora.safetensors?download=true",
"directory": "loras"
}
]
}

View File

@@ -1,332 +0,0 @@
{
"last_node_id": 37,
"last_link_id": 57,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 192],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 45
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [56],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"cute anime girl with massive fluffy fennec ears and a big fluffy tail blonde messy long hair blue eyes wearing a maid outfit with a long black gold leaf pattern dress and a white apron mouth open placing a fancy black forest cake with candles on top of a dinner table of an old dark Victorian mansion lit by candlelight with a bright window to the foggy forest and very expensive stuff everywhere there are paintings on the walls"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1151, 195],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 52
},
{
"name": "vae",
"type": "VAE",
"link": 46
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1375, 194],
"size": [985.3, 1060.38],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [471, 455],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 30,
"type": "CheckpointLoaderSimple",
"pos": [48, 192],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [47],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [45, 54],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [46],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["flux1-dev-fp8.safetensors"]
},
{
"id": 31,
"type": "KSampler",
"pos": [816, 192],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 47
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 57
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 55
},
{
"name": "latent_image",
"type": "LATENT",
"link": 51
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [52],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
972054013131368,
"randomize",
20,
1,
"euler",
"simple",
1
]
},
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [390, 400],
"size": [422.85, 164.31],
"flags": {
"collapsed": true
},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 54,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [55],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 35,
"type": "FluxGuidance",
"pos": [576, 96],
"size": [211.6, 58],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 56
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [3.5]
},
{
"id": 37,
"type": "MarkdownNote",
"pos": [60, 345],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#flux-dev-1)"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 34,
"type": "Note",
"pos": [825, 510],
"size": [282.86, 164.08],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"Note that Flux dev and schnell do not have any negative prompt so CFG should be set to 1.0. Setting CFG to 1.0 means the negative prompt is ignored."
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[9, 8, 0, 9, 0, "IMAGE"],
[45, 30, 1, 6, 0, "CLIP"],
[46, 30, 2, 8, 1, "VAE"],
[47, 30, 0, 31, 0, "MODEL"],
[51, 27, 0, 31, 3, "LATENT"],
[52, 31, 0, 8, 0, "LATENT"],
[54, 30, 1, 33, 0, "CLIP"],
[55, 33, 0, 31, 2, "CONDITIONING"],
[56, 6, 0, 35, 0, "CONDITIONING"],
[57, 35, 0, 31, 1, "CONDITIONING"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.8,
"offset": [350.72, 161.55]
}
},
"version": 0.4,
"models": [
{
"name": "flux1-dev-fp8.safetensors",
"url": "https://huggingface.co/Comfy-Org/flux1-dev/resolve/main/flux1-dev-fp8.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,771 +0,0 @@
{
"last_node_id": 38,
"last_link_id": 116,
"nodes": [
{
"id": 11,
"type": "DualCLIPLoader",
"pos": [48, 288],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"t5xxl_fp16.safetensors",
"clip_l.safetensors",
"flux",
"default"
]
},
{
"id": 17,
"type": "BasicScheduler",
"pos": [480, 1008],
"size": [315, 106],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 55,
"slot_index": 0
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"shape": 3,
"links": [20]
}
],
"properties": {
"Node name for S&R": "BasicScheduler"
},
"widgets_values": ["simple", 20, 1]
},
{
"id": 16,
"type": "KSamplerSelect",
"pos": [480, 912],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"shape": 3,
"links": [19]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [480, 144],
"size": [317.4, 58],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [3.5],
"color": "#233",
"bgcolor": "#355"
},
{
"id": 22,
"type": "BasicGuider",
"pos": [576, 48],
"size": [222.35, 46],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 54,
"slot_index": 0
},
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 42,
"slot_index": 1
}
],
"outputs": [
{
"name": "GUIDER",
"type": "GUIDER",
"shape": 3,
"links": [30],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "BasicGuider"
},
"widgets_values": []
},
{
"id": 13,
"type": "SamplerCustomAdvanced",
"pos": [864, 192],
"size": [272.36, 124.54],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "noise",
"type": "NOISE",
"link": 37,
"slot_index": 0
},
{
"name": "guider",
"type": "GUIDER",
"link": 30,
"slot_index": 1
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 19,
"slot_index": 2
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 20,
"slot_index": 3
},
{
"name": "latent_image",
"type": "LATENT",
"link": 116,
"slot_index": 4
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"shape": 3,
"links": [24],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustomAdvanced"
},
"widgets_values": []
},
{
"id": 25,
"type": "RandomNoise",
"pos": [480, 768],
"size": [315, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "NOISE",
"type": "NOISE",
"shape": 3,
"links": [37]
}
],
"properties": {
"Node name for S&R": "RandomNoise"
},
"widgets_values": [219670278747233, "randomize"],
"color": "#2a363b",
"bgcolor": "#3f5159"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [866, 367],
"size": [210, 46],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 24
},
{
"name": "vae",
"type": "VAE",
"link": 12
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 240],
"size": [422.85, 164.31],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 10
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"cute anime girl with massive fluffy fennec ears and a big fluffy tail blonde messy long hair blue eyes wearing a maid outfit with a long black gold leaf pattern dress and a white apron mouth open holding a fancy black forest cake with candles on top in the kitchen of an old dark Victorian mansion lit by candlelight with a bright window to the foggy forest and very expensive stuff everywhere"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 30,
"type": "ModelSamplingFlux",
"pos": [480, 1152],
"size": [315, 130],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 56,
"slot_index": 0
},
{
"name": "width",
"type": "INT",
"widget": {
"name": "width"
},
"link": 115,
"slot_index": 1
},
{
"name": "height",
"type": "INT",
"widget": {
"name": "height"
},
"link": 114,
"slot_index": 2
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [54, 55],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ModelSamplingFlux"
},
"widgets_values": [1.15, 0.5, 1024, 1024]
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [480, 624],
"size": [315, 106],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "width",
"type": "INT",
"widget": {
"name": "width"
},
"link": 112
},
{
"name": "height",
"type": "INT",
"widget": {
"name": "height"
},
"link": 113
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [116],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 34,
"type": "PrimitiveNode",
"pos": [432, 480],
"size": [210, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "width"
},
"links": [112, 115],
"slot_index": 0
}
],
"title": "width",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [1024, "fixed"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 35,
"type": "PrimitiveNode",
"pos": [672, 480],
"size": [210, 82],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "height"
},
"links": [113, 114],
"slot_index": 0
}
],
"title": "height",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [1024, "fixed"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 12,
"type": "UNETLoader",
"pos": [48, 144],
"size": [315, 82],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [56],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-dev.safetensors", "default"],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 9,
"type": "SaveImage",
"pos": [1155, 196],
"size": [985.3, 1060.38],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 37,
"type": "Note",
"pos": [480, 1344],
"size": [315.0, 117.98],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"The reference sampling implementation auto adjusts the shift value based on the resolution, if you don't want this you can just bypass (CTRL-B) this ModelSamplingFlux node.\n"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 10,
"type": "VAELoader",
"pos": [48, 432],
"size": [311.82, 60.43],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 28,
"type": "Note",
"pos": [48, 576],
"size": [336, 288],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"If you get an error in any of the nodes above make sure the files are in the correct directories.\n\nSee the top of the examples page for the links : https://comfyanonymous.github.io/ComfyUI_examples/flux/\n\nflux1-dev.safetensors goes in: ComfyUI/models/unet/\n\nt5xxl_fp16.safetensors and clip_l.safetensors go in: ComfyUI/models/clip/\n\nae.safetensors goes in: ComfyUI/models/vae/\n\n\nTip: You can set the weight_dtype above to one of the fp8 types if you have memory issues."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 38,
"type": "MarkdownNote",
"pos": [45, 930],
"size": [225, 60],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#flux-dev-1)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[9, 8, 0, 9, 0, "IMAGE"],
[10, 11, 0, 6, 0, "CLIP"],
[12, 10, 0, 8, 1, "VAE"],
[19, 16, 0, 13, 2, "SAMPLER"],
[20, 17, 0, 13, 3, "SIGMAS"],
[24, 13, 0, 8, 0, "LATENT"],
[30, 22, 0, 13, 1, "GUIDER"],
[37, 25, 0, 13, 0, "NOISE"],
[41, 6, 0, 26, 0, "CONDITIONING"],
[42, 26, 0, 22, 1, "CONDITIONING"],
[54, 30, 0, 22, 0, "MODEL"],
[55, 30, 0, 17, 0, "MODEL"],
[56, 12, 0, 30, 0, "MODEL"],
[112, 34, 0, 27, 0, "INT"],
[113, 35, 0, 27, 1, "INT"],
[114, 35, 0, 30, 2, "INT"],
[115, 34, 0, 30, 1, "INT"],
[116, 27, 0, 13, 4, "LATENT"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [-0.18, 2.29]
},
"groupNodes": {
"EmptyLatentImage": {
"nodes": [
{
"type": "PrimitiveNode",
"pos": [432, 480],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 6,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [],
"widget": {
"name": "height"
},
"slot_index": 0
}
],
"title": "height",
"properties": {
"Run widget replace on values": false
},
"color": "#323",
"bgcolor": "#535",
"index": 0
},
{
"type": "PrimitiveNode",
"pos": [672, 480],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 7,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [],
"slot_index": 0,
"widget": {
"name": "width"
}
}
],
"title": "width",
"properties": {
"Run widget replace on values": false
},
"color": "#323",
"bgcolor": "#535",
"index": 1
},
{
"type": "EmptySD3LatentImage",
"pos": [480, 624],
"size": {
"0": 315,
"1": 106
},
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "width",
"type": "INT",
"link": null,
"widget": {
"name": "width"
}
},
{
"name": "height",
"type": "INT",
"link": null,
"widget": {
"name": "height"
}
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1],
"index": 2
}
],
"links": [
[1, 0, 2, 0, 34, "INT"],
[0, 0, 2, 1, 35, "INT"]
],
"external": [
[0, 0, "INT"],
[1, 0, "INT"],
[2, 0, "LATENT"]
],
"config": {
"0": {
"output": {
"0": {
"name": "height"
}
},
"input": {
"value": {
"visible": true
}
}
},
"1": {
"output": {
"0": {
"name": "width"
}
},
"input": {
"value": {
"visible": true
}
}
},
"2": {
"input": {
"width": {
"visible": false
},
"height": {
"visible": false
}
}
}
}
}
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
},
{
"name": "flux1-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/flux1-dev.safetensors?download=true",
"directory": "diffusion_models"
}
]
}

View File

@@ -1,458 +0,0 @@
{
"last_node_id": 45,
"last_link_id": 100,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [307, 282],
"size": [425.28, 180.61],
"flags": {
"collapsed": true
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 63
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [81],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 32,
"type": "VAELoader",
"pos": [1352, 422],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [60, 82],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [593, 44],
"size": [317.4, 58],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [80],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [30]
},
{
"id": 34,
"type": "DualCLIPLoader",
"pos": [-237, 79],
"size": [315, 106],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [62, 63]
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp16.safetensors",
"flux",
"default"
]
},
{
"id": 39,
"type": "DifferentialDiffusion",
"pos": [1001, -68],
"size": [277.2, 26],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 85
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [86],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DifferentialDiffusion"
},
"widgets_values": []
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1620, 98],
"size": [210, 46],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 60
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [95],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 38,
"type": "InpaintModelConditioning",
"pos": [952, 78],
"size": [302.4, 138],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 80
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 81
},
{
"name": "vae",
"type": "VAE",
"link": 82
},
{
"name": "pixels",
"type": "IMAGE",
"link": 99
},
{
"name": "mask",
"type": "MASK",
"link": 100
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [77],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [78],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [88],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "InpaintModelConditioning"
},
"widgets_values": [false]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1877, 101],
"size": [828.95, 893.85],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 95
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [1280, 100],
"size": [315, 262],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 86
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 77
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 78
},
{
"name": "latent_image",
"type": "LATENT",
"link": 88
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
656821733471329,
"randomize",
20,
1,
"euler",
"normal",
1
]
},
{
"id": 31,
"type": "UNETLoader",
"pos": [602, -120],
"size": [315, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [85],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-fill-dev.safetensors", "default"]
},
{
"id": 17,
"type": "LoadImage",
"pos": [587, 312],
"size": [315, 314.0],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [99],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": [100],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["yosemite_inpaint_example.png", "image"]
},
{
"id": 23,
"type": "CLIPTextEncode",
"pos": [144, -7],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"anime girl with massive fennec ears blonde hair blue eyes wearing a pink shirt"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 45,
"type": "MarkdownNote",
"pos": [-225, 255],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#fill-inpainting-model)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[41, 23, 0, 26, 0, "CONDITIONING"],
[60, 32, 0, 8, 1, "VAE"],
[62, 34, 0, 23, 0, "CLIP"],
[63, 34, 0, 7, 0, "CLIP"],
[77, 38, 0, 3, 1, "CONDITIONING"],
[78, 38, 1, 3, 2, "CONDITIONING"],
[80, 26, 0, 38, 0, "CONDITIONING"],
[81, 7, 0, 38, 1, "CONDITIONING"],
[82, 32, 0, 38, 2, "VAE"],
[85, 31, 0, 39, 0, "MODEL"],
[86, 39, 0, 3, 0, "MODEL"],
[88, 38, 2, 3, 3, "LATENT"],
[95, 8, 0, 9, 0, "IMAGE"],
[99, 17, 0, 38, 3, "IMAGE"],
[100, 17, 1, 38, 4, "MASK"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.21,
"offset": [566.62, 207.73]
}
},
"version": 0.4,
"models": [
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "flux1-fill-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Fill-dev/resolve/main/flux1-fill-dev.safetensors?download=true",
"directory": "diffusion_models"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
}
]
}

View File

@@ -1,491 +0,0 @@
{
"last_node_id": 45,
"last_link_id": 98,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [307, 282],
"size": [425.28, 180.61],
"flags": {
"collapsed": true
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 63
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [81],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 32,
"type": "VAELoader",
"pos": [1352, 422],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [60, 82],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [593, 44],
"size": [317.4, 58],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [80],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [30]
},
{
"id": 34,
"type": "DualCLIPLoader",
"pos": [-237, 79],
"size": [315, 106],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [62, 63]
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp16.safetensors",
"flux",
"default"
]
},
{
"id": 39,
"type": "DifferentialDiffusion",
"pos": [1001, -68],
"size": [277.2, 26],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 85
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [86],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DifferentialDiffusion"
},
"widgets_values": []
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1620, 98],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 60
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [95],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 38,
"type": "InpaintModelConditioning",
"pos": [952, 78],
"size": [302.4, 138],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 80
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 81
},
{
"name": "vae",
"type": "VAE",
"link": 82
},
{
"name": "pixels",
"type": "IMAGE",
"link": 97
},
{
"name": "mask",
"type": "MASK",
"link": 98
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [77],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [78],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [88],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "InpaintModelConditioning"
},
"widgets_values": [false]
},
{
"id": 44,
"type": "ImagePadForOutpaint",
"pos": [415, 359],
"size": [315, 174],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 96
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [97],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": [98],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "ImagePadForOutpaint"
},
"widgets_values": [400, 0, 400, 400, 24]
},
{
"id": 23,
"type": "CLIPTextEncode",
"pos": [144, -7],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["beautiful scenery"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 9,
"type": "SaveImage",
"pos": [1877, 101],
"size": [828.95, 893.85],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 95
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [1280, 100],
"size": [315, 262],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 86
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 77
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 78
},
{
"name": "latent_image",
"type": "LATENT",
"link": 88
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
164211176398261,
"randomize",
20,
1,
"euler",
"normal",
1
]
},
{
"id": 17,
"type": "LoadImage",
"pos": [23, 376],
"size": [315, 314.0],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [96],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": [],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sd3_controlnet_example.png", "image"]
},
{
"id": 31,
"type": "UNETLoader",
"pos": [602, -120],
"size": [315, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [85],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-fill-dev.safetensors", "default"]
},
{
"id": 45,
"type": "MarkdownNote",
"pos": [-225, 255],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#fill-inpainting-model)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[41, 23, 0, 26, 0, "CONDITIONING"],
[60, 32, 0, 8, 1, "VAE"],
[62, 34, 0, 23, 0, "CLIP"],
[63, 34, 0, 7, 0, "CLIP"],
[77, 38, 0, 3, 1, "CONDITIONING"],
[78, 38, 1, 3, 2, "CONDITIONING"],
[80, 26, 0, 38, 0, "CONDITIONING"],
[81, 7, 0, 38, 1, "CONDITIONING"],
[82, 32, 0, 38, 2, "VAE"],
[85, 31, 0, 39, 0, "MODEL"],
[86, 39, 0, 3, 0, "MODEL"],
[88, 38, 2, 3, 3, "LATENT"],
[95, 8, 0, 9, 0, "IMAGE"],
[96, 17, 0, 44, 0, "IMAGE"],
[97, 44, 0, 38, 3, "IMAGE"],
[98, 44, 1, 38, 4, "MASK"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1,
"offset": [240.64, 211.87]
}
},
"version": 0.4,
"models": [
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "flux1-fill-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Fill-dev/resolve/main/flux1-fill-dev.safetensors?download=true",
"directory": "diffusion_models"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
}
]
}

View File

@@ -1,951 +0,0 @@
{
"last_node_id": 44,
"last_link_id": 123,
"nodes": [
{
"id": 11,
"type": "DualCLIPLoader",
"pos": [48, 288],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"t5xxl_fp16.safetensors",
"clip_l.safetensors",
"flux",
"default"
]
},
{
"id": 17,
"type": "BasicScheduler",
"pos": [480, 1008],
"size": [315, 106],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 55,
"slot_index": 0
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"shape": 3,
"links": [20]
}
],
"properties": {
"Node name for S&R": "BasicScheduler"
},
"widgets_values": ["simple", 20, 1]
},
{
"id": 16,
"type": "KSamplerSelect",
"pos": [480, 912],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"shape": 3,
"links": [19]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [480, 144],
"size": [317.4, 58],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [122],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [3.5],
"color": "#233",
"bgcolor": "#355"
},
{
"id": 13,
"type": "SamplerCustomAdvanced",
"pos": [864, 192],
"size": [272.36, 124.54],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "noise",
"type": "NOISE",
"link": 37,
"slot_index": 0
},
{
"name": "guider",
"type": "GUIDER",
"link": 30,
"slot_index": 1
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 19,
"slot_index": 2
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 20,
"slot_index": 3
},
{
"name": "latent_image",
"type": "LATENT",
"link": 116,
"slot_index": 4
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"shape": 3,
"links": [24],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustomAdvanced"
},
"widgets_values": []
},
{
"id": 25,
"type": "RandomNoise",
"pos": [480, 768],
"size": [315, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "NOISE",
"type": "NOISE",
"shape": 3,
"links": [37]
}
],
"properties": {
"Node name for S&R": "RandomNoise"
},
"widgets_values": [958831004022715, "randomize"],
"color": "#2a363b",
"bgcolor": "#3f5159"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [866, 367],
"size": [210, 46],
"flags": {},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 24
},
{
"name": "vae",
"type": "VAE",
"link": 12
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 30,
"type": "ModelSamplingFlux",
"pos": [480, 1152],
"size": [315, 130],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 56,
"slot_index": 0
},
{
"name": "width",
"type": "INT",
"widget": {
"name": "width"
},
"link": 115,
"slot_index": 1
},
{
"name": "height",
"type": "INT",
"widget": {
"name": "height"
},
"link": 114,
"slot_index": 2
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [54, 55],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ModelSamplingFlux"
},
"widgets_values": [1.15, 0.5, 1024, 1024]
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [480, 624],
"size": [315, 106],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "width",
"type": "INT",
"widget": {
"name": "width"
},
"link": 112
},
{
"name": "height",
"type": "INT",
"widget": {
"name": "height"
},
"link": 113
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [116],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 34,
"type": "PrimitiveNode",
"pos": [432, 480],
"size": [210, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "width"
},
"links": [112, 115],
"slot_index": 0
}
],
"title": "width",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [1024, "fixed"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 35,
"type": "PrimitiveNode",
"pos": [672, 480],
"size": [210, 82],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "height"
},
"links": [113, 114],
"slot_index": 0
}
],
"title": "height",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [1024, "fixed"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 12,
"type": "UNETLoader",
"pos": [48, 144],
"size": [315, 82],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [56],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-dev.safetensors", "default"],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 9,
"type": "SaveImage",
"pos": [1155, 196],
"size": [985.3, 1060.38],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 37,
"type": "Note",
"pos": [480, 1344],
"size": [315.0, 117.98],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"The reference sampling implementation auto adjusts the shift value based on the resolution, if you don't want this you can just bypass (CTRL-B) this ModelSamplingFlux node.\n"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 10,
"type": "VAELoader",
"pos": [48, 432],
"size": [311.82, 60.43],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 28,
"type": "Note",
"pos": [48, 576],
"size": [336, 288],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"If you get an error in any of the nodes above make sure the files are in the correct directories.\n\nSee the top of the examples page for the links : https://comfyanonymous.github.io/ComfyUI_examples/flux/\n\nflux1-dev.safetensors goes in: ComfyUI/models/diffusion_models/\n\nt5xxl_fp16.safetensors and clip_l.safetensors go in: ComfyUI/models/text_encoders/\n\nae.safetensors goes in: ComfyUI/models/vae/\n\n\nTip: You can set the weight_dtype above to one of the fp8 types if you have memory issues."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 39,
"type": "CLIPVisionEncode",
"pos": [420, -300],
"size": [290, 78],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 117
},
{
"name": "image",
"type": "IMAGE",
"link": 118
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [120],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 40,
"type": "LoadImage",
"pos": [60, -300],
"size": [315, 314],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [118]
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sd3_controlnet_example.png", "image"]
},
{
"id": 42,
"type": "StyleModelLoader",
"pos": [400, -180],
"size": [340, 60],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "STYLE_MODEL",
"type": "STYLE_MODEL",
"links": [119]
}
],
"properties": {
"Node name for S&R": "StyleModelLoader"
},
"widgets_values": ["flux1-redux-dev.safetensors"]
},
{
"id": 38,
"type": "CLIPVisionLoader",
"pos": [60, -410],
"size": [370, 60],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"links": [117],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionLoader"
},
"widgets_values": ["sigclip_vision_patch14_384.safetensors"]
},
{
"id": 41,
"type": "StyleModelApply",
"pos": [760, -300],
"size": [320, 122],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 122
},
{
"name": "style_model",
"type": "STYLE_MODEL",
"link": 119
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"shape": 7,
"link": 120
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [123],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "StyleModelApply"
},
"widgets_values": [1, "multiply"]
},
{
"id": 22,
"type": "BasicGuider",
"pos": [960, 66],
"size": [222.35, 46],
"flags": {},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 54,
"slot_index": 0
},
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 123,
"slot_index": 1
}
],
"outputs": [
{
"name": "GUIDER",
"type": "GUIDER",
"shape": 3,
"links": [30],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "BasicGuider"
},
"widgets_values": []
},
{
"id": 43,
"type": "Note",
"pos": [1130, -440],
"size": [345.9, 182.31],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"The redux model lets you prompt with images. It can be used with any Flux1 dev or schnell model workflow.\n\nYou can chain multiple \"Apply Style Model\" nodes if you want to mix multiple images together."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 240],
"size": [422.85, 164.31],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 10
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["cute anime girl with massive fluffy fennec ears"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 44,
"type": "MarkdownNote",
"pos": [60, 915],
"size": [225, 60],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#redux)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[9, 8, 0, 9, 0, "IMAGE"],
[10, 11, 0, 6, 0, "CLIP"],
[12, 10, 0, 8, 1, "VAE"],
[19, 16, 0, 13, 2, "SAMPLER"],
[20, 17, 0, 13, 3, "SIGMAS"],
[24, 13, 0, 8, 0, "LATENT"],
[30, 22, 0, 13, 1, "GUIDER"],
[37, 25, 0, 13, 0, "NOISE"],
[41, 6, 0, 26, 0, "CONDITIONING"],
[54, 30, 0, 22, 0, "MODEL"],
[55, 30, 0, 17, 0, "MODEL"],
[56, 12, 0, 30, 0, "MODEL"],
[112, 34, 0, 27, 0, "INT"],
[113, 35, 0, 27, 1, "INT"],
[114, 35, 0, 30, 2, "INT"],
[115, 34, 0, 30, 1, "INT"],
[116, 27, 0, 13, 4, "LATENT"],
[117, 38, 0, 39, 0, "CLIP_VISION"],
[118, 40, 0, 39, 1, "IMAGE"],
[119, 42, 0, 41, 1, "STYLE_MODEL"],
[120, 39, 0, 41, 2, "CLIP_VISION_OUTPUT"],
[122, 26, 0, 41, 0, "CONDITIONING"],
[123, 41, 0, 22, 1, "CONDITIONING"]
],
"groups": [
{
"id": 1,
"title": "Redux Model",
"bounding": [45, -480, 1040, 507.6],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.9,
"offset": [139.8, 57.78]
},
"groupNodes": {
"EmptyLatentImage": {
"nodes": [
{
"type": "PrimitiveNode",
"pos": [432, 480],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 6,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [],
"widget": {
"name": "height"
},
"slot_index": 0
}
],
"title": "height",
"properties": {
"Run widget replace on values": false
},
"color": "#323",
"bgcolor": "#535",
"index": 0
},
{
"type": "PrimitiveNode",
"pos": [672, 480],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 7,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [],
"slot_index": 0,
"widget": {
"name": "width"
}
}
],
"title": "width",
"properties": {
"Run widget replace on values": false
},
"color": "#323",
"bgcolor": "#535",
"index": 1
},
{
"type": "EmptySD3LatentImage",
"pos": [480, 624],
"size": {
"0": 315,
"1": 106
},
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "width",
"type": "INT",
"link": null,
"widget": {
"name": "width"
}
},
{
"name": "height",
"type": "INT",
"link": null,
"widget": {
"name": "height"
}
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1],
"index": 2
}
],
"links": [
[1, 0, 2, 0, 34, "INT"],
[0, 0, 2, 1, 35, "INT"]
],
"external": [
[0, 0, "INT"],
[1, 0, "INT"],
[2, 0, "LATENT"]
],
"config": {
"0": {
"output": {
"0": {
"name": "height"
}
},
"input": {
"value": {
"visible": true
}
}
},
"1": {
"output": {
"0": {
"name": "width"
}
},
"input": {
"value": {
"visible": true
}
}
},
"2": {
"input": {
"width": {
"visible": false
},
"height": {
"visible": false
}
}
}
}
}
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "flux1-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/flux1-dev.safetensors?download=true",
"directory": "diffusion_models"
},
{
"name": "sigclip_vision_patch14_384.safetensors",
"url": "https://huggingface.co/Comfy-Org/sigclip_vision_384/resolve/main/sigclip_vision_patch14_384.safetensors?download=true",
"directory": "clip_vision"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
},
{
"name": "flux1-redux-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Redux-dev/resolve/main/flux1-redux-dev.safetensors?download=true",
"directory": "style_models"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
}
]
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 23 KiB

View File

@@ -1,302 +0,0 @@
{
"last_node_id": 37,
"last_link_id": 58,
"nodes": [
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [390, 400],
"size": [422.85, 164.31],
"flags": {
"collapsed": true
},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 54,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [55],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [471, 455],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1151, 195],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 52
},
{
"name": "vae",
"type": "VAE",
"link": 46
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1375, 194],
"size": [985.3, 1060.38],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 31,
"type": "KSampler",
"pos": [816, 192],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 47
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 58
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 55
},
{
"name": "latent_image",
"type": "LATENT",
"link": 51
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [52],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
173805153958730,
"randomize",
4,
1,
"euler",
"simple",
1
]
},
{
"id": 30,
"type": "CheckpointLoaderSimple",
"pos": [48, 192],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [47],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [45, 54],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [46],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["flux1-schnell-fp8.safetensors"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 192],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 45
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [58],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"a bottle with a beautiful rainbow galaxy inside it on top of a wooden table in the middle of a modern kitchen beside a plate of vegetables and mushrooms and a wine glasse that contains a planet earth with a plate with a half eaten apple pie on it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 34,
"type": "Note",
"pos": [831, 501],
"size": [282.86, 164.08],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"Note that Flux dev and schnell do not have any negative prompt so CFG should be set to 1.0. Setting CFG to 1.0 means the negative prompt is ignored.\n\nThe schnell model is a distilled model that can generate a good image with only 4 steps."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 37,
"type": "MarkdownNote",
"pos": [45, 345],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#flux-schnell-1)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[9, 8, 0, 9, 0, "IMAGE"],
[45, 30, 1, 6, 0, "CLIP"],
[46, 30, 2, 8, 1, "VAE"],
[47, 30, 0, 31, 0, "MODEL"],
[51, 27, 0, 31, 3, "LATENT"],
[52, 31, 0, 8, 0, "LATENT"],
[54, 30, 1, 33, 0, "CLIP"],
[55, 33, 0, 31, 2, "CONDITIONING"],
[58, 6, 0, 31, 1, "CONDITIONING"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [0.68, 1.83]
}
},
"version": 0.4,
"models": [
{
"name": "flux1-schnell-fp8.safetensors",
"url": "https://huggingface.co/Comfy-Org/flux1-schnell/resolve/main/flux1-schnell-fp8.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,376 +0,0 @@
{
"last_node_id": 28,
"last_link_id": 79,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 1
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 77
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 57
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1023216319780679,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 24,
"type": "CLIPTextEncode",
"pos": [-260, -340],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [69],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"photograph scenery landscape, snow beautiful scenery mountain, glass bottle; purple galaxy bottle; sun"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [300, 230],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1495, 167],
"size": [493.63, 561.54],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["gligen/testing"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [410, 460],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 27,
"type": "GLIGENTextBoxApply",
"pos": [770, -340],
"size": [437.22, 382.68],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "conditioning_to",
"type": "CONDITIONING",
"link": 78
},
{
"name": "clip",
"type": "CLIP",
"link": 74
},
{
"name": "gligen_textbox_model",
"type": "GLIGEN",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [77],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "GLIGENTextBoxApply"
},
"widgets_values": ["sun", 144, 144, 416, 16]
},
{
"id": 21,
"type": "GLIGENTextBoxApply",
"pos": [270, -340],
"size": [437.22, 382.68],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "conditioning_to",
"type": "CONDITIONING",
"link": 69
},
{
"name": "clip",
"type": "CLIP",
"link": 53
},
{
"name": "gligen_textbox_model",
"type": "GLIGEN",
"link": 54
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [65, 78],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "GLIGENTextBoxApply"
},
"widgets_values": ["purple galaxy bottle", 192, 304, 176, 272]
},
{
"id": 10,
"type": "GLIGENLoader",
"pos": [-230, -70],
"size": [390, 60],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "GLIGEN",
"type": "GLIGEN",
"links": [54, 75],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "GLIGENLoader"
},
"widgets_values": ["gligen_sd14_textbox_pruned.safetensors"]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-220, 130],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [1],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [5, 53, 67, 74],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [79],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 28,
"type": "MarkdownNote",
"pos": [-210, 285],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/gligen/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[1, 4, 0, 3, 0, "MODEL"],
[2, 5, 0, 3, 3, "LATENT"],
[5, 4, 1, 7, 0, "CLIP"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[53, 4, 1, 21, 1, "CLIP"],
[54, 10, 0, 21, 2, "GLIGEN"],
[57, 7, 0, 3, 2, "CONDITIONING"],
[67, 4, 1, 24, 0, "CLIP"],
[69, 24, 0, 21, 0, "CONDITIONING"],
[74, 4, 1, 27, 1, "CLIP"],
[75, 10, 0, 27, 2, "GLIGEN"],
[77, 27, 0, 3, 1, "CONDITIONING"],
[78, 21, 0, 27, 0, "CONDITIONING"],
[79, 4, 2, 8, 1, "VAE"]
],
"groups": [
{
"id": 1,
"title": "Base Prompt",
"bounding": [-315, -465, 518, 302],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "GLIGEN (for best results the elements should match some elements in the base prompt)",
"bounding": [255, -465, 980, 529],
"color": "#A88",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.8,
"offset": [433.59, 361.81]
}
},
"version": 0.4,
"models": [
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "gligen_sd14_textbox_pruned.safetensors",
"url": "https://huggingface.co/comfyanonymous/GLIGEN_pruned_safetensors/resolve/main/gligen_sd14_textbox_pruned.safetensors?download=true",
"directory": "gligen"
}
]
}

View File

@@ -1,607 +0,0 @@
{
"last_node_id": 26,
"last_link_id": 35,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1185.5, 412.07],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 30
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 13,
"type": "VAEDecode",
"pos": [3221.22, 232.38],
"size": [210, 46],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 15
},
{
"name": "vae",
"type": "VAE",
"link": 33
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [17],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [81.78, 142.34],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 28
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4, 12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece HDR victorian portrait painting of woman, blonde hair, mountain nature, blue sky\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [84.78, 352.34],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 29
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6, 13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands, text, watermark\n"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [142.78, 571.34],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [552.78, 143.34],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 34
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 20],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
251225068430076,
"randomize",
12,
8,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 21,
"type": "VAEDecode",
"pos": [988.18, 29.56],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 20
},
{
"name": "vae",
"type": "VAE",
"link": 32
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 20,
"type": "VAEEncode",
"pos": [2459.1, 103.02],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 26
},
{
"name": "vae",
"type": "VAE",
"link": 31
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [18],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEEncode"
},
"widgets_values": []
},
{
"id": 22,
"type": "ImageUpscaleWithModel",
"pos": [1631.06, 3.66],
"size": [226.8, 46],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "upscale_model",
"type": "UPSCALE_MODEL",
"link": 24
},
{
"name": "image",
"type": "IMAGE",
"link": 23
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [27],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageUpscaleWithModel"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1446, 411],
"size": [611.26, 628.6],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 24,
"type": "ImageScale",
"pos": [1931, 10],
"size": [315, 130],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 27
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [26],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageScale"
},
"widgets_values": ["bilinear", 1536, 1536, "disabled"]
},
{
"id": 12,
"type": "SaveImage",
"pos": [3463, 230],
"size": [868.01, 936.97],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 17
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 11,
"type": "KSampler",
"pos": [2811.96, 176.22],
"size": [315, 262],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 35,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 12,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 13,
"slot_index": 2
},
{
"name": "latent_image",
"type": "LATENT",
"link": 18,
"slot_index": 3
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [15],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
783745448521451,
"randomize",
14,
8,
"uni_pc_bh2",
"normal",
0.5
]
},
{
"id": 25,
"type": "CheckpointLoaderSimple",
"pos": [-262, 284],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [34, 35],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [28, 29],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [30, 31, 32, 33],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v2-1_768-ema-pruned.safetensors"]
},
{
"id": 23,
"type": "UpscaleModelLoader",
"pos": [1288.06, -39.34],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "UPSCALE_MODEL",
"type": "UPSCALE_MODEL",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UpscaleModelLoader"
},
"widgets_values": ["RealESRGAN_x4plus.pth"]
},
{
"id": 26,
"type": "MarkdownNote",
"pos": [-300, 750],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/#non-latent-upscaling)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[12, 6, 0, 11, 1, "CONDITIONING"],
[13, 7, 0, 11, 2, "CONDITIONING"],
[15, 11, 0, 13, 0, "LATENT"],
[17, 13, 0, 12, 0, "IMAGE"],
[18, 20, 0, 11, 3, "LATENT"],
[20, 3, 0, 21, 0, "LATENT"],
[23, 21, 0, 22, 1, "IMAGE"],
[24, 23, 0, 22, 0, "UPSCALE_MODEL"],
[26, 24, 0, 20, 0, "IMAGE"],
[27, 22, 0, 24, 0, "IMAGE"],
[28, 25, 1, 6, 0, "CLIP"],
[29, 25, 1, 7, 0, "CLIP"],
[30, 25, 2, 8, 1, "VAE"],
[31, 25, 2, 20, 1, "VAE"],
[32, 25, 2, 21, 1, "VAE"],
[33, 25, 2, 13, 1, "VAE"],
[34, 25, 0, 3, 0, "MODEL"],
[35, 25, 0, 11, 0, "MODEL"]
],
"groups": [
{
"id": 1,
"title": "Txt2Img",
"bounding": [-300, 0, 1211, 708],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Save Intermediate Image",
"bounding": [1170, 330, 516, 196],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Second pass",
"bounding": [2775, 90, 379, 429],
"color": "#444",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Save Final Image",
"bounding": [3210, 135, 483, 199],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 5,
"title": "ESRGAN upscale with 4x model",
"bounding": [1260, -120, 578, 184],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 6,
"title": "Decode to Pixel space",
"bounding": [960, -45, 285, 142],
"color": "#A88",
"font_size": 24,
"flags": {}
},
{
"id": 7,
"title": "Encode back to latent space",
"bounding": [2400, 15, 312, 157],
"color": "#A88",
"font_size": 24,
"flags": {}
},
{
"id": 8,
"title": "Downscale image to a more reasonable size",
"bounding": [1845, -75, 483, 245],
"color": "#8AA",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.71,
"offset": [448.42, 482.51]
}
},
"version": 0.4,
"models": [
{
"name": "v2-1_768-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "RealESRGAN_x4plus.pth",
"url": "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.0/RealESRGAN_x4plus.pth",
"directory": "upscale_models"
}
]
}

View File

@@ -1,442 +0,0 @@
{
"last_node_id": 17,
"last_link_id": 23,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1235.72, 577.19],
"size": [210, 46],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 21
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "LatentUpscale",
"pos": [1238, 170],
"size": [315, 130],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 10
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [14]
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1152, 1152, "disabled"]
},
{
"id": 13,
"type": "VAEDecode",
"pos": [1961, 125],
"size": [210, 46],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 15
},
{
"name": "vae",
"type": "VAE",
"link": 22
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [17],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [374, 171],
"size": [422.85, 164.31],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 19
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4, 12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece HDR victorian portrait painting of woman, blonde hair, mountain nature, blue sky\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [377, 381],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6, 13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands, text, watermark\n"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [435, 600],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 11,
"type": "KSampler",
"pos": [1585, 114],
"size": [315, 262],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 23,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 12,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 13,
"slot_index": 2
},
{
"name": "latent_image",
"type": "LATENT",
"link": 14,
"slot_index": 3
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [15],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
469771404043268,
"randomize",
14,
8,
"dpmpp_2m",
"simple",
0.5
]
},
{
"id": 12,
"type": "SaveImage",
"pos": [2203, 123],
"size": [407.54, 468.13],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 17
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [845, 172],
"size": [315, 262],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 18
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
89848141647836,
"randomize",
12,
8,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 16,
"type": "CheckpointLoaderSimple",
"pos": [24, 315],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [18, 23],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [19, 20],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [21, 22],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v2-1_768-ema-pruned.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1495.72, 576.19],
"size": [232.94, 282.43],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 17,
"type": "MarkdownNote",
"pos": [0, 780],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[10, 3, 0, 10, 0, "LATENT"],
[12, 6, 0, 11, 1, "CONDITIONING"],
[13, 7, 0, 11, 2, "CONDITIONING"],
[14, 10, 0, 11, 3, "LATENT"],
[15, 11, 0, 13, 0, "LATENT"],
[17, 13, 0, 12, 0, "IMAGE"],
[18, 16, 0, 3, 0, "MODEL"],
[19, 16, 1, 6, 0, "CLIP"],
[20, 16, 1, 7, 0, "CLIP"],
[21, 16, 2, 8, 1, "VAE"],
[22, 16, 2, 13, 1, "VAE"],
[23, 16, 0, 11, 0, "MODEL"]
],
"groups": [
{
"id": 1,
"title": "Txt2Img",
"bounding": [0, 30, 1211, 708],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Save Intermediate Image",
"bounding": [1230, 495, 516, 196],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Hires Fix",
"bounding": [1230, 30, 710, 464],
"color": "#b58b2a",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Save Final Image",
"bounding": [1950, 30, 483, 199],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.97,
"offset": [419.13, 209.33]
}
},
"version": 0.4,
"models": [
{
"name": "v2-1_768-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,553 +0,0 @@
{
"last_node_id": 78,
"last_link_id": 215,
"nodes": [
{
"id": 16,
"type": "KSamplerSelect",
"pos": [484, 751],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"shape": 3,
"links": [19]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 17,
"type": "BasicScheduler",
"pos": [478, 860],
"size": [315, 106],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 190,
"slot_index": 0
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"shape": 3,
"links": [20]
}
],
"properties": {
"Node name for S&R": "BasicScheduler"
},
"widgets_values": ["simple", 20, 1]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [520, 100],
"size": [317.4, 58],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 175
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [129],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [6],
"color": "#233",
"bgcolor": "#355"
},
{
"id": 45,
"type": "EmptyHunyuanLatentVideo",
"pos": [475.54, 432.67],
"size": [315, 130],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [180],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyHunyuanLatentVideo"
},
"widgets_values": [848, 480, 73, 1]
},
{
"id": 22,
"type": "BasicGuider",
"pos": [600, 0],
"size": [222.35, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 195,
"slot_index": 0
},
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 129,
"slot_index": 1
}
],
"outputs": [
{
"name": "GUIDER",
"type": "GUIDER",
"shape": 3,
"links": [30],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "BasicGuider"
},
"widgets_values": []
},
{
"id": 67,
"type": "ModelSamplingSD3",
"pos": [360, 0],
"size": [210, 58],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 209
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [195],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ModelSamplingSD3"
},
"widgets_values": [7]
},
{
"id": 10,
"type": "VAELoader",
"pos": [0, 420],
"size": [350, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [206, 211],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["hunyuan_video_vae_bf16.safetensors"]
},
{
"id": 11,
"type": "DualCLIPLoader",
"pos": [0, 270],
"size": [350, 106],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [205],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"llava_llama3_fp8_scaled.safetensors",
"hunyuan_video",
"default"
]
},
{
"id": 73,
"type": "VAEDecodeTiled",
"pos": [1150, 200],
"size": [210, 150],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 210
},
{
"name": "vae",
"type": "VAE",
"link": 211
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [215],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecodeTiled"
},
"widgets_values": [256, 64, 64, 8]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1150, 90],
"size": [210, 46],
"flags": {},
"order": 15,
"mode": 2,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 181
},
{
"name": "vae",
"type": "VAE",
"link": 206
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 74,
"type": "Note",
"pos": [1150, 360],
"size": [210, 170],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"Use the tiled decode node by default because most people will need it.\n\nLower the tile_size and overlap if you run out of memory."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 12,
"type": "UNETLoader",
"pos": [0, 150],
"size": [350, 82],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [190, 209],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["hunyuan_video_t2v_720p_bf16.safetensors", "default"],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 77,
"type": "Note",
"pos": [0, 0],
"size": [350, 110],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"Select a fp8 weight_dtype if you are running out of memory."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 13,
"type": "SamplerCustomAdvanced",
"pos": [860, 200],
"size": [272.36, 124.54],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "noise",
"type": "NOISE",
"link": 37,
"slot_index": 0
},
{
"name": "guider",
"type": "GUIDER",
"link": 30,
"slot_index": 1
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 19,
"slot_index": 2
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 20,
"slot_index": 3
},
{
"name": "latent_image",
"type": "LATENT",
"link": 180,
"slot_index": 4
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"shape": 3,
"links": [181, 210],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustomAdvanced"
},
"widgets_values": []
},
{
"id": 44,
"type": "CLIPTextEncode",
"pos": [420, 200],
"size": [422.85, 164.31],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 205
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [175],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"anime style anime girl with massive fennec ears and one big fluffy tail, she has blonde hair long hair blue eyes wearing a pink sweater and a long blue skirt walking in a beautiful outdoor scenery with snow mountains in the background"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 75,
"type": "SaveAnimatedWEBP",
"pos": [1410, 200],
"size": [315, 366],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 215
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI", 24, false, 80, "default"]
},
{
"id": 25,
"type": "RandomNoise",
"pos": [479, 618],
"size": [315, 82],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "NOISE",
"type": "NOISE",
"shape": 3,
"links": [37]
}
],
"properties": {
"Node name for S&R": "RandomNoise"
},
"widgets_values": [1, "randomize"],
"color": "#2a363b",
"bgcolor": "#3f5159"
},
{
"id": 78,
"type": "MarkdownNote",
"pos": [0, 525],
"size": [225, 60],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/hunyuan_video/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[19, 16, 0, 13, 2, "SAMPLER"],
[20, 17, 0, 13, 3, "SIGMAS"],
[30, 22, 0, 13, 1, "GUIDER"],
[37, 25, 0, 13, 0, "NOISE"],
[129, 26, 0, 22, 1, "CONDITIONING"],
[175, 44, 0, 26, 0, "CONDITIONING"],
[180, 45, 0, 13, 4, "LATENT"],
[181, 13, 0, 8, 0, "LATENT"],
[190, 12, 0, 17, 0, "MODEL"],
[195, 67, 0, 22, 0, "MODEL"],
[205, 11, 0, 44, 0, "CLIP"],
[206, 10, 0, 8, 1, "VAE"],
[209, 12, 0, 67, 0, "MODEL"],
[210, 13, 0, 73, 0, "LATENT"],
[211, 10, 0, 73, 1, "VAE"],
[215, 73, 0, 75, 0, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"groupNodes": {},
"ds": {
"scale": 0.86,
"offset": [315.94, 195.23]
}
},
"version": 0.4,
"models": [
{
"name": "hunyuan_video_vae_bf16.safetensors",
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/vae/hunyuan_video_vae_bf16.safetensors?download=true",
"directory": "vae"
},
{
"name": "llava_llama3_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/text_encoders/llava_llama3_fp8_scaled.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "hunyuan_video_t2v_720p_bf16.safetensors",
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/diffusion_models/hunyuan_video_t2v_720p_bf16.safetensors?download=true",
"directory": "diffusion_models"
}
]
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 25 KiB

View File

@@ -1,447 +0,0 @@
{
"last_node_id": 14,
"last_link_id": 17,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [
413,
389
],
"size": {
"0": 425.27801513671875,
"1": 180.6060791015625
},
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 15
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
6
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"watermark, text\n"
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [
415,
186
],
"size": {
"0": 422.84503173828125,
"1": 164.31304931640625
},
"flags": {},
"order": 2,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 14
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
4
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"photograph of victorian woman with wings, sky clouds, meadow grass\n"
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [
1209,
188
],
"size": {
"0": 210,
"1": 46
},
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 17
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
9
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
}
},
{
"id": 9,
"type": "SaveImage",
"pos": [
1451,
189
],
"size": {
"0": 210,
"1": 58
},
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"properties": {},
"widgets_values": [
"ComfyUI"
]
},
{
"id": 10,
"type": "LoadImage",
"pos": [
215.9799597167969,
703.6800268554688
],
"size": [
315,
314.00002670288086
],
"flags": {},
"order": 0,
"mode": 0,
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
10
],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null,
"shape": 3
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": [
"example.png",
"image"
]
},
{
"id": 12,
"type": "VAEEncode",
"pos": [
614.979959716797,
707.6800268554688
],
"size": {
"0": 210,
"1": 46
},
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 10
},
{
"name": "vae",
"type": "VAE",
"link": 16
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
11
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEEncode"
}
},
{
"id": 3,
"type": "KSampler",
"pos": [
863,
186
],
"size": {
"0": 315,
"1": 262
},
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 13
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 11
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
7
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
280823642470253,
"randomize",
20,
8,
"dpmpp_2m",
"normal",
0.8700000000000001
]
},
{
"id": 14,
"type": "CheckpointLoaderSimple",
"pos": [
19,
433
],
"size": {
"0": 315,
"1": 98
},
"flags": {},
"order": 1,
"mode": 0,
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
13
],
"shape": 3,
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [
14,
15
],
"shape": 3,
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [
16,
17
],
"shape": 3,
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": [
"v1-5-pruned-emaonly-fp16.safetensors"
]
}
],
"links": [
[
4,
6,
0,
3,
1,
"CONDITIONING"
],
[
6,
7,
0,
3,
2,
"CONDITIONING"
],
[
7,
3,
0,
8,
0,
"LATENT"
],
[
9,
8,
0,
9,
0,
"IMAGE"
],
[
10,
10,
0,
12,
0,
"IMAGE"
],
[
11,
12,
0,
3,
3,
"LATENT"
],
[
13,
14,
0,
3,
0,
"MODEL"
],
[
14,
14,
1,
6,
0,
"CLIP"
],
[
15,
14,
1,
7,
0,
"CLIP"
],
[
16,
14,
2,
12,
1,
"VAE"
],
[
17,
14,
2,
8,
1,
"VAE"
]
],
"groups": [
{
"title": "Loading images",
"bounding": [
150,
630,
726,
171
],
"color": "#3f789e"
}
],
"config": {},
"extra": {},
"version": 0.4,
"models": [{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
}]
}

View File

@@ -1,314 +0,0 @@
{
"last_node_id": 24,
"last_link_id": 41,
"nodes": [
{
"id": 3,
"type": "KSampler",
"pos": [867.8, 375.7],
"size": [315, 262],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 39
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 17
},
{
"name": "latent_image",
"type": "LATENT",
"link": 18
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
237514639057514,
"randomize",
20,
2.5,
"euler",
"karras",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1207.8, 375.7],
"size": [210, 46],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 26
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "SaveAnimatedWEBP",
"pos": [1459, 376],
"size": [741.67, 564.59],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 10
}
],
"outputs": [],
"properties": {
"Node name for S&R": "SaveAnimatedWEBP"
},
"widgets_values": ["ComfyUI", 10, false, 85, "default"]
},
{
"id": 12,
"type": "SVD_img2vid_Conditioning",
"pos": [487.8, 395.7],
"size": [315, 218],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 24
},
{
"name": "init_image",
"type": "IMAGE",
"link": 41,
"slot_index": 1
},
{
"name": "vae",
"type": "VAE",
"link": 25
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"shape": 3,
"links": [40],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"shape": 3,
"links": [17],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"shape": 3,
"links": [18],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "SVD_img2vid_Conditioning"
},
"widgets_values": [1024, 576, 14, 127, 6, 0]
},
{
"id": 14,
"type": "VideoLinearCFGGuidance",
"pos": [487.8, 265.7],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 23
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [39],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VideoLinearCFGGuidance"
},
"widgets_values": [1]
},
{
"id": 15,
"type": "ImageOnlyCheckpointLoader",
"pos": [55, 267],
"size": [369.6, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [23],
"slot_index": 0
},
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [24],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [25, 26],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "ImageOnlyCheckpointLoader"
},
"widgets_values": ["svd.safetensors"]
},
{
"id": 23,
"type": "LoadImage",
"pos": [106, 441],
"size": [315, 314.0],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [41]
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["mountains.png", "image"]
},
{
"id": 24,
"type": "MarkdownNote",
"pos": [105, 810],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/video/#image-to-video)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[10, 8, 0, 10, 0, "IMAGE"],
[17, 12, 1, 3, 2, "CONDITIONING"],
[18, 12, 2, 3, 3, "LATENT"],
[23, 15, 0, 14, 0, "MODEL"],
[24, 15, 1, 12, 0, "CLIP_VISION"],
[25, 15, 2, 12, 2, "VAE"],
[26, 15, 2, 8, 1, "VAE"],
[39, 14, 0, 3, 0, "MODEL"],
[40, 12, 0, 3, 1, "CONDITIONING"],
[41, 23, 0, 12, 1, "IMAGE"]
],
"groups": [
{
"id": 1,
"title": "Image to Video",
"bounding": [480, 195, 954, 478],
"color": "#8A8",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.96,
"offset": [255.53, 68.37]
}
},
"version": 0.4,
"models": [
{
"name": "svd.safetensors",
"url": "https://huggingface.co/stabilityai/stable-video-diffusion-img2vid/resolve/main/svd.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,360 +0,0 @@
{
"last_node_id": 31,
"last_link_id": 87,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [432, 158],
"size": [422.85, 164.31],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 81
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"outdoors in the yosemite national park mountains nature\n\n\n\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [434, 371],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 82
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["watermark, text\n"]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1422, 387],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 42
},
{
"name": "vae",
"type": "VAE",
"link": 83
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [22],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [940, 180],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 80
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 72
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
152545289528694,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 29,
"type": "CheckpointLoaderSimple",
"pos": [17, 303],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [80],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [81, 82],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [83, 84],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["512-inpainting-ema.safetensors"]
},
{
"id": 20,
"type": "LoadImage",
"pos": [-107, 726],
"size": [344, 346],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [85],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": [],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["yosemite_outpaint_example.png", "image"]
},
{
"id": 30,
"type": "ImagePadForOutpaint",
"pos": [269, 727],
"size": [315, 174],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 85
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [87],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": [86],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "ImagePadForOutpaint"
},
"widgets_values": [0, 128, 0, 128, 40]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1671, 384],
"size": [360.55, 441.53],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 22
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 26,
"type": "VAEEncodeForInpaint",
"pos": [617, 720],
"size": [226.8, 98],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 87
},
{
"name": "vae",
"type": "VAE",
"link": 84
},
{
"name": "mask",
"type": "MASK",
"link": 86
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [72],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEEncodeForInpaint"
},
"widgets_values": [8]
},
{
"id": 31,
"type": "MarkdownNote",
"pos": [30, 465],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/inpaint/#outpainting)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[22, 8, 0, 9, 0, "IMAGE"],
[42, 3, 0, 8, 0, "LATENT"],
[72, 26, 0, 3, 3, "LATENT"],
[80, 29, 0, 3, 0, "MODEL"],
[81, 29, 1, 6, 0, "CLIP"],
[82, 29, 1, 7, 0, "CLIP"],
[83, 29, 2, 8, 1, "VAE"],
[84, 29, 2, 26, 1, "VAE"],
[85, 20, 0, 30, 0, "IMAGE"],
[86, 30, 1, 26, 2, "MASK"],
[87, 30, 0, 26, 0, "IMAGE"]
],
"groups": [
{
"id": 1,
"title": "Load image and pad for outpainting",
"bounding": [-120, 600, 1038, 509],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.86,
"offset": [491.92, 146.6]
}
},
"version": 0.4,
"models": [
{
"name": "512-inpainting-ema.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-inpainting/resolve/main/512-inpainting-ema.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,323 +0,0 @@
{
"last_node_id": 30,
"last_link_id": 84,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [432, 158],
"size": [422.85, 164.31],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 81
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"closeup photograph of maine coon (cat:1.2) in the yosemite national park mountains nature"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [434, 371],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 82
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["watermark, text\n"]
},
{
"id": 26,
"type": "VAEEncodeForInpaint",
"pos": [503, 669],
"size": [226.8, 98],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 73
},
{
"name": "vae",
"type": "VAE",
"link": 83
},
{
"name": "mask",
"type": "MASK",
"link": 79
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [72],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEEncodeForInpaint"
},
"widgets_values": [6]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1422, 387],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 42
},
{
"name": "vae",
"type": "VAE",
"link": 84
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [22],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1709, 356],
"size": [210, 250],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 22
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 29,
"type": "CheckpointLoaderSimple",
"pos": [30, 314],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [80],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [81, 82],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [83, 84],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["512-inpainting-ema.safetensors"]
},
{
"id": 20,
"type": "LoadImage",
"pos": [49, 679],
"size": [385, 365],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [73],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": [79],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["yosemite_inpaint_example.png", "image"]
},
{
"id": 3,
"type": "KSampler",
"pos": [940, 180],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 80
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 72
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1040111309094545,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 30,
"type": "MarkdownNote",
"pos": [30, 480],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/inpaint/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[22, 8, 0, 9, 0, "IMAGE"],
[42, 3, 0, 8, 0, "LATENT"],
[72, 26, 0, 3, 3, "LATENT"],
[73, 20, 0, 26, 0, "IMAGE"],
[79, 20, 1, 26, 2, "MASK"],
[80, 29, 0, 3, 0, "MODEL"],
[81, 29, 1, 6, 0, "CLIP"],
[82, 29, 1, 7, 0, "CLIP"],
[83, 29, 2, 26, 1, "VAE"],
[84, 29, 2, 8, 1, "VAE"]
],
"groups": [
{
"id": 1,
"title": "Load image and alpha mask for inpainting",
"bounding": [-15, 600, 786, 442],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.88,
"offset": [832.78, 166.61]
}
},
"version": 0.4,
"models": [
{
"name": "512-inpainting-ema.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-inpainting/resolve/main/512-inpainting-ema.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,528 +0,0 @@
{
"last_node_id": 33,
"last_link_id": 62,
"nodes": [
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-60, 229],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [54],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8, 31],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["wd-illusion-fp16.safetensors"]
},
{
"id": 13,
"type": "CheckpointLoaderSimple",
"pos": [1296, -571],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [56],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [27],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["cardosAnime_v10.safetensors"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [370, 40],
"size": [510, 220],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"anime happy girl (fennec:1.2) (ears:1.3) blonde long (messy hair:1.1) blue eyes, wearing serafuku jeans (sitting on rock:1.15) (spread legs:1.15) (sneakers:0.95) in lake rural swiss village on the mountain side sky clouds HDR sunset\n(exceptional, best aesthetic, new, newest, best quality, masterpiece, extremely detailed, anime:1.05)\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [370, 300],
"size": [510, 190],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"lowres, bad anatomy, bad hands, (text:1.1), blurry, mutated hands and fingers, mutation, deformed face, ugly, (logo:1.1), cropped, worst quality, jpeg, (jpeg artifacts), deleted, old, oldest, (censored), (bad aesthetic), (mosaic censoring, bar censor, blur censor) earphones"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [560, 540],
"size": [315, 106],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1368, 768, 1]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1280, 140],
"size": [210, 46],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "SaveImage",
"pos": [1540, 140],
"size": [1174.13, 734.16],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 10
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 22,
"type": "CLIPSetLastLayer",
"pos": [1670, -550],
"size": [315, 58],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 27
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [13, 14],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 15,
"type": "CLIPTextEncode",
"pos": [2060, -920],
"size": [662.38, 313.1],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 14
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"from far away anime happy girl (fennec ears:0.95) long (messy hair:1.3) blue eyes, wearing serafuku jeans sitting on rock spread legs (sneakers:0.95) in lake rural swiss village on the mountain side sky clouds HDR sunset\n"
]
},
{
"id": 14,
"type": "CLIPTextEncode",
"pos": [2060, -550],
"size": [660, 300],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 13
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [58],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), (text:1.1), letters, numbers, error, cropped, (jpeg artifacts:1.2), (signature:1.1), (watermark:1.1), username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.1), extra legs, (forehead mark) (penis)"
]
},
{
"id": 11,
"type": "VAEDecode",
"pos": [3240, -750],
"size": [210, 46],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 60
},
{
"name": "vae",
"type": "VAE",
"link": 31
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 12,
"type": "SaveImage",
"pos": [3540, -750],
"size": [1868.09, 1101.47],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 12
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 32,
"type": "KSampler",
"pos": [2830, -750],
"size": [315, 262],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 56
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 57
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 58
},
{
"name": "latent_image",
"type": "LATENT",
"link": 59
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [60],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
417682270866800,
"randomize",
8,
13,
"dpmpp_sde",
"simple",
0.5
]
},
{
"id": 27,
"type": "LatentUpscaleBy",
"pos": [1510, -160],
"size": [325.41, 82],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 62,
"slot_index": 0
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [59],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscaleBy"
},
"widgets_values": ["bislerp", 1.5]
},
{
"id": 3,
"type": "KSampler",
"pos": [920, 140],
"size": [318.5, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 54
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 62],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
758448896326830,
"randomize",
14,
8,
"dpmpp_sde",
"simple",
1
]
},
{
"id": 33,
"type": "MarkdownNote",
"pos": [-45, 375],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/#more-examples)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[3, 4, 1, 6, 0, "CLIP"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[5, 4, 1, 7, 0, "CLIP"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[10, 8, 0, 10, 0, "IMAGE"],
[12, 11, 0, 12, 0, "IMAGE"],
[13, 22, 0, 14, 0, "CLIP"],
[14, 22, 0, 15, 0, "CLIP"],
[27, 13, 1, 22, 0, "CLIP"],
[31, 4, 2, 11, 1, "VAE"],
[54, 4, 0, 3, 0, "MODEL"],
[56, 13, 0, 32, 0, "MODEL"],
[57, 15, 0, 32, 1, "CONDITIONING"],
[58, 14, 0, 32, 2, "CONDITIONING"],
[59, 27, 0, 32, 3, "LATENT"],
[60, 32, 0, 11, 0, "LATENT"],
[62, 3, 0, 27, 0, "LATENT"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.76,
"offset": [1200.17, 444.58]
}
},
"version": 0.4
}

View File

@@ -1,311 +0,0 @@
{
"last_node_id": 11,
"last_link_id": 14,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 14
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 13
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["masterpiece best quality girl"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 12
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
851616030078638,
"randomize",
20,
8,
"euler",
"normal",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [210, 250],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-461, 288],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [10],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [11],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 10,
"type": "LoraLoader",
"pos": [-25, 144],
"size": [315, 126],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 10
},
{
"name": "clip",
"type": "CLIP",
"link": 11
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [12],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [13, 14],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoraLoader"
},
"widgets_values": ["epiNoiseoffset_v2.safetensors", 1, 1]
},
{
"id": 11,
"type": "MarkdownNote",
"pos": [-450, 435],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/lora/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[9, 8, 0, 9, 0, "IMAGE"],
[10, 4, 0, 10, 0, "MODEL"],
[11, 4, 1, 10, 1, "CLIP"],
[12, 10, 0, 3, 0, "MODEL"],
[13, 10, 1, 6, 0, "CLIP"],
[14, 10, 1, 7, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.06,
"offset": [777.19, 192.48]
}
},
"version": 0.4,
"models": [
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "epiNoiseoffset_v2.safetensors",
"url": "https://civitai.com/api/download/models/16576?type=Model&format=SafeTensor&size=full&fp=fp16",
"directory": "loras"
}
]
}

View File

@@ -1,357 +0,0 @@
{
"last_node_id": 12,
"last_link_id": 18,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 14
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 13
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["masterpiece best quality girl"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 12
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
513173432917412,
"randomize",
20,
8,
"euler",
"normal",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [210, 250],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 10,
"type": "LoraLoader",
"pos": [-27, 160],
"size": [315, 126],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 15
},
{
"name": "clip",
"type": "CLIP",
"link": 16
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [12],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [13, 14],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoraLoader"
},
"widgets_values": ["epiNoiseoffset_v2.safetensors", 1, 1]
},
{
"id": 11,
"type": "LoraLoader",
"pos": [-379, 160],
"size": [315, 126],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 17
},
{
"name": "clip",
"type": "CLIP",
"link": 18
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [15],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [16],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoraLoader"
},
"widgets_values": ["theovercomer8sContrastFix_sd15.safetensors", 1, 1]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-780, 284],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [17],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [18],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 12,
"type": "MarkdownNote",
"pos": [-765, 450],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/lora/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[9, 8, 0, 9, 0, "IMAGE"],
[12, 10, 0, 3, 0, "MODEL"],
[13, 10, 1, 6, 0, "CLIP"],
[14, 10, 1, 7, 0, "CLIP"],
[15, 11, 0, 10, 0, "MODEL"],
[16, 11, 1, 10, 1, "CLIP"],
[17, 4, 0, 11, 0, "MODEL"],
[18, 4, 1, 11, 1, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.76,
"offset": [1200.17, 444.58]
}
},
"version": 0.4,
"models": [
{
"name": "theovercomer8sContrastFix_sd15.safetensors",
"url": "https://civitai.com/api/download/models/10350?type=Model&format=SafeTensor&size=full&fp=fp16",
"directory": "loras"
},
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "epiNoiseoffset_v2.safetensors",
"url": "https://civitai.com/api/download/models/16576?type=Model&format=SafeTensor&size=full&fp=fp16",
"directory": "loras"
}
]
}

View File

@@ -1,482 +0,0 @@
{
"last_node_id": 79,
"last_link_id": 190,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [420, 190],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 74
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [187],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"best quality, 4k, HDR, a tracking shot of a beautiful scene of the sea waves on the beach with a massive explosion in the water"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [420, 390],
"size": [425.28, 180.61],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [188],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"low quality, worst quality, deformed, distorted, disfigured, motion smear, motion artifacts, fused fingers, bad anatomy, weird hand, ugly"
],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1600, 30],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 171
},
{
"name": "vae",
"type": "VAE",
"link": 87
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [106],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 38,
"type": "CLIPLoader",
"pos": [60, 190],
"size": [315, 82],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [74, 75],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPLoader"
},
"widgets_values": ["t5xxl_fp16.safetensors", "ltxv", "default"]
},
{
"id": 41,
"type": "SaveAnimatedWEBP",
"pos": [1830, 30],
"size": [680, 610],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 106
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI", 24, false, 90, "default"]
},
{
"id": 44,
"type": "CheckpointLoaderSimple",
"pos": [520, 30],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [181],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": null
},
{
"name": "VAE",
"type": "VAE",
"links": [87, 189],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["ltx-video-2b-v0.9.safetensors"]
},
{
"id": 69,
"type": "LTXVConditioning",
"pos": [920, 60],
"size": [223.87, 78],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 183
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 184
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [166],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [167],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LTXVConditioning"
},
"widgets_values": [25]
},
{
"id": 71,
"type": "LTXVScheduler",
"pos": [856, 531],
"size": [315, 154],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "latent",
"type": "LATENT",
"shape": 7,
"link": 185
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"links": [182],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LTXVScheduler"
},
"widgets_values": [30, 2.05, 0.95, true, 0.1]
},
{
"id": 72,
"type": "SamplerCustom",
"pos": [1201, 32],
"size": [355.2, 230],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 181
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 166
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 167
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 172
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 182
},
{
"name": "latent_image",
"type": "LATENT",
"link": 186
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"links": [171],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustom"
},
"widgets_values": [true, 501744655390087, "randomize", 3]
},
{
"id": 73,
"type": "KSamplerSelect",
"pos": [860, 420],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"links": [172]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 76,
"type": "Note",
"pos": [40, 350],
"size": [360, 200],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"This model needs long descriptive prompts, if the prompt is too short the quality will suffer greatly."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 77,
"type": "LTXVImgToVideo",
"pos": [863, 181],
"size": [315, 214],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 187
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 188
},
{
"name": "vae",
"type": "VAE",
"link": 189
},
{
"name": "image",
"type": "IMAGE",
"link": 190
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [183],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [184],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [185, 186],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "LTXVImgToVideo"
},
"widgets_values": [768, 512, 97, 1, 0.15]
},
{
"id": 78,
"type": "LoadImage",
"pos": [420, 620],
"size": [385.16, 333.33],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [190]
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["island.jpg", "image"]
},
{
"id": 79,
"type": "MarkdownNote",
"pos": [45, 600],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/ltxv/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[74, 38, 0, 6, 0, "CLIP"],
[75, 38, 0, 7, 0, "CLIP"],
[87, 44, 2, 8, 1, "VAE"],
[106, 8, 0, 41, 0, "IMAGE"],
[166, 69, 0, 72, 1, "CONDITIONING"],
[167, 69, 1, 72, 2, "CONDITIONING"],
[171, 72, 0, 8, 0, "LATENT"],
[172, 73, 0, 72, 3, "SAMPLER"],
[181, 44, 0, 72, 0, "MODEL"],
[182, 71, 0, 72, 4, "SIGMAS"],
[183, 77, 0, 69, 0, "CONDITIONING"],
[184, 77, 1, 69, 1, "CONDITIONING"],
[185, 77, 2, 71, 0, "LATENT"],
[186, 77, 2, 72, 5, "LATENT"],
[187, 6, 0, 77, 0, "CONDITIONING"],
[188, 7, 0, 77, 1, "CONDITIONING"],
[189, 44, 2, 77, 2, "VAE"],
[190, 78, 0, 77, 3, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.23,
"offset": [-35.52, 153.62]
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "ltx-video-2b-v0.9.safetensors",
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,419 +0,0 @@
{
"last_node_id": 77,
"last_link_id": 182,
"nodes": [
{
"id": 38,
"type": "CLIPLoader",
"pos": [60, 190],
"size": [315, 82],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [74, 75],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPLoader"
},
"widgets_values": ["t5xxl_fp16.safetensors", "ltxv", "default"]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1600, 30],
"size": [210, 46],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 171
},
{
"name": "vae",
"type": "VAE",
"link": 87
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [106],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 69,
"type": "LTXVConditioning",
"pos": [920, 60],
"size": [223.87, 78],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 169
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 170
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [166],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [167],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LTXVConditioning"
},
"widgets_values": [25]
},
{
"id": 72,
"type": "SamplerCustom",
"pos": [1201, 32],
"size": [355.2, 230],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 181
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 166
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 167
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 172
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 182
},
{
"name": "latent_image",
"type": "LATENT",
"link": 175
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"links": [171],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustom"
},
"widgets_values": [true, 497797676867141, "randomize", 3]
},
{
"id": 44,
"type": "CheckpointLoaderSimple",
"pos": [520, 30],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [181],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": null
},
{
"name": "VAE",
"type": "VAE",
"links": [87],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["ltx-video-2b-v0.9.safetensors"]
},
{
"id": 70,
"type": "EmptyLTXVLatentVideo",
"pos": [860, 240],
"size": [315, 130],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [168, 175],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLTXVLatentVideo"
},
"widgets_values": [768, 512, 97, 1]
},
{
"id": 71,
"type": "LTXVScheduler",
"pos": [856, 531],
"size": [315, 154],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "latent",
"type": "LATENT",
"shape": 7,
"link": 168
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"links": [182],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LTXVScheduler"
},
"widgets_values": [30, 2.05, 0.95, true, 0.1]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [420, 190],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 74
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [169],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"A woman with long brown hair and light skin smiles at another woman with long blonde hair. The woman with brown hair wears a black jacket and has a small, barely noticeable mole on her right cheek. The camera angle is a close-up, focused on the woman with brown hair's face. The lighting is warm and natural, likely from the setting sun, casting a soft glow on the scene. The scene appears to be real-life footage."
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [420, 390],
"size": [425.28, 180.61],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [170],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"low quality, worst quality, deformed, distorted, disfigured, motion smear, motion artifacts, fused fingers, bad anatomy, weird hand, ugly"
],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 73,
"type": "KSamplerSelect",
"pos": [860, 420],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"links": [172]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 76,
"type": "Note",
"pos": [40, 350],
"size": [360, 200],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"This model needs long descriptive prompts, if the prompt is too short the quality will suffer greatly."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 41,
"type": "SaveAnimatedWEBP",
"pos": [1830, 30],
"size": [680, 610],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 106
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI", 24, false, 90, "default"]
},
{
"id": 77,
"type": "MarkdownNote",
"pos": [45, 600],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/ltxv/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[74, 38, 0, 6, 0, "CLIP"],
[75, 38, 0, 7, 0, "CLIP"],
[87, 44, 2, 8, 1, "VAE"],
[106, 8, 0, 41, 0, "IMAGE"],
[166, 69, 0, 72, 1, "CONDITIONING"],
[167, 69, 1, 72, 2, "CONDITIONING"],
[168, 70, 0, 71, 0, "LATENT"],
[169, 6, 0, 69, 0, "CONDITIONING"],
[170, 7, 0, 69, 1, "CONDITIONING"],
[171, 72, 0, 8, 0, "LATENT"],
[172, 73, 0, 72, 3, "SAMPLER"],
[175, 70, 0, 72, 5, "LATENT"],
[181, 44, 0, 72, 0, "MODEL"],
[182, 71, 0, 72, 4, "SIGMAS"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.65,
"offset": [1490.32, 926.49]
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "ltx-video-2b-v0.9.safetensors",
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,492 +0,0 @@
{
"last_node_id": 32,
"last_link_id": 43,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [180, 203],
"size": [425.28, 180.61],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 42
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis)"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [287, 462],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1053, 172],
"size": [210, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 28
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 24,
"type": "CLIPTextEncode",
"pos": [-823, -550],
"size": [422.85, 164.31],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 43
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [37],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo) girl photograph realistic (flat chest:0.9), (fennec ears:1.0) (fox ears:1.0), (messy hair) blonde hair, blue eyes, standing, serafuku sweater, (brick house) (scenery HDR landscape) (sun clouds) sky, mountains,\n\n"
]
},
{
"id": 21,
"type": "LoadImage",
"pos": [-560, -144],
"size": [272.84, 372.22],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [33],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["pose_present.png", "image"]
},
{
"id": 31,
"type": "CheckpointLoaderSimple",
"pos": [-1005, 281],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [41],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [42, 43],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["AOM3A1.safetensors"]
},
{
"id": 15,
"type": "VAELoader",
"pos": [720, 506],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [28],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["kl-f8-anime2.ckpt"]
},
{
"id": 27,
"type": "ControlNetLoader",
"pos": [-641, -245],
"size": [352.55, 58],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [39],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["control_v11p_sd15_openpose_fp16.safetensors"]
},
{
"id": 26,
"type": "ControlNetLoader",
"pos": [156, -339],
"size": [343.32, 58],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [38],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["control_v11p_sd15_scribble_fp16.safetensors"]
},
{
"id": 22,
"type": "ControlNetApply",
"pos": [-204, -240],
"size": [317.4, 98],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 37
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 39
},
{
"name": "image",
"type": "IMAGE",
"link": 33
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [35],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [1]
},
{
"id": 3,
"type": "KSampler",
"pos": [699, 167],
"size": [315, 262],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 41
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
894480165483805,
"randomize",
12,
6,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 23,
"type": "ControlNetApply",
"pos": [550.81, -385.59],
"size": [317.4, 98],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 35
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 38
},
{
"name": "image",
"type": "IMAGE",
"link": 34
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [0.8]
},
{
"id": 20,
"type": "LoadImage",
"pos": [188, -217],
"size": [278.1, 361.87],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [34],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["house_scribble.png", "image"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1310, 169],
"size": [516.95, 567.67],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 32,
"type": "MarkdownNote",
"pos": [-1005, 435],
"size": [225, 60],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#mixing-controlnets)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[28, 15, 0, 8, 1, "VAE"],
[33, 21, 0, 22, 2, "IMAGE"],
[34, 20, 0, 23, 2, "IMAGE"],
[35, 22, 0, 23, 0, "CONDITIONING"],
[37, 24, 0, 22, 0, "CONDITIONING"],
[38, 26, 0, 23, 1, "CONTROL_NET"],
[39, 27, 0, 22, 1, "CONTROL_NET"],
[40, 23, 0, 3, 1, "CONDITIONING"],
[41, 31, 0, 3, 0, "MODEL"],
[42, 31, 1, 7, 0, "CLIP"],
[43, 31, 1, 24, 0, "CLIP"]
],
"groups": [
{
"id": 1,
"title": "Apply Pose ControlNet",
"bounding": [-735, -360, 859, 323],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Apply Scribble ControlNet",
"bounding": [165, -480, 739, 336],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.81,
"offset": [2040.05, 734.44]
}
},
"version": 0.4,
"models": [
{
"name": "control_v11p_sd15_scribble_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_scribble_fp16.safetensors?download=true",
"directory": "controlnet"
},
{
"name": "control_v11p_sd15_openpose_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_openpose_fp16.safetensors",
"directory": "controlnet"
}
]
}

View File

@@ -1,308 +0,0 @@
{
"last_node_id": 40,
"last_link_id": 79,
"nodes": [
{
"id": 3,
"type": "KSampler",
"pos": [863, 187],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 79
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 46
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 52
},
{
"name": "latent_image",
"type": "LATENT",
"link": 38
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [35],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
704883238463297,
"randomize",
30,
4.5,
"euler",
"simple",
1
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 74
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [46],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"a fox moving quickly in a beautiful winter scenery nature trees sunset tracking camera"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [52],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1210, 190],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 35
},
{
"name": "vae",
"type": "VAE",
"link": 76
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [56],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 21,
"type": "EmptyMochiLatentVideo",
"pos": [520, 620],
"size": [315, 130],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [38],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyMochiLatentVideo"
},
"widgets_values": [848, 480, 37, 1]
},
{
"id": 28,
"type": "SaveAnimatedWEBP",
"pos": [1460, 190],
"size": [847.3, 602.03],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 56
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI", 24, false, 80, "default"]
},
{
"id": 37,
"type": "UNETLoader",
"pos": [420, 40],
"size": [315, 82],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [79],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["mochi_preview_bf16.safetensors", "default"]
},
{
"id": 38,
"type": "CLIPLoader",
"pos": [40, 270],
"size": [315, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [74, 75],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPLoader"
},
"widgets_values": ["t5xxl_fp16.safetensors", "mochi", "default"]
},
{
"id": 39,
"type": "VAELoader",
"pos": [890, 500],
"size": [278.68, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [76]
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["mochi_vae.safetensors"]
},
{
"id": 40,
"type": "MarkdownNote",
"pos": [45, 405],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/mochi/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[35, 3, 0, 8, 0, "LATENT"],
[38, 21, 0, 3, 3, "LATENT"],
[46, 6, 0, 3, 1, "CONDITIONING"],
[52, 7, 0, 3, 2, "CONDITIONING"],
[56, 8, 0, 28, 0, "IMAGE"],
[74, 38, 0, 6, 0, "CLIP"],
[75, 38, 0, 7, 0, "CLIP"],
[76, 39, 0, 8, 1, "VAE"],
[79, 37, 0, 3, 0, "MODEL"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [35.42, 115.48]
}
},
"version": 0.4,
"models": [
{
"name": "mochi_vae.safetensors",
"url": "https://huggingface.co/Comfy-Org/mochi_preview_repackaged/resolve/main/split_files/vae/mochi_vae.safetensors?download=true",
"directory": "vae"
},
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "mochi_preview_bf16.safetensors",
"url": "https://huggingface.co/Comfy-Org/mochi_preview_repackaged/resolve/main/split_files/diffusion_models/mochi_preview_bf16.safetensors?download=true",
"directory": "diffusion_models"
}
]
}

View File

@@ -1,470 +0,0 @@
{
"last_node_id": 52,
"last_link_id": 105,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1152, 48],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 63
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 33,
"type": "EmptySD3LatentImage",
"pos": [576, 336],
"size": [210, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [66],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 48,
"type": "ImageScale",
"pos": [-320, 448],
"size": [315, 130],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 91
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [92],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageScale"
},
"widgets_values": ["bilinear", 1024, 1024, "center"]
},
{
"id": 49,
"type": "PreviewImage",
"pos": [384, 512],
"size": [443.1, 520.83],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 93
}
],
"outputs": [],
"properties": {
"Node name for S&R": "PreviewImage"
},
"widgets_values": []
},
{
"id": 50,
"type": "ConditioningZeroOut",
"pos": [203, 133],
"size": [317.4, 26],
"flags": {
"collapsed": true
},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 98
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [102],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningZeroOut"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [816, 48],
"size": [284.12, 262],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 14
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 103,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 104
},
{
"name": "latent_image",
"type": "LATENT",
"link": 66
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [63],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
790192293768778,
"randomize",
32,
4.5,
"euler",
"simple",
1
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1392, 48],
"size": [882.45, 927.85],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 13
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 45,
"type": "LoadImage",
"pos": [-666, 447],
"size": [288, 336],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [91]
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sd3_controlnet_example.png", "image"]
},
{
"id": 47,
"type": "Canny",
"pos": [20, 449],
"size": [315, 82],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 92
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [93, 99],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "Canny"
},
"widgets_values": [0.4, 0.8]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [0, -128],
"size": [320, 192],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 65
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [98, 101],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"happy cute anime fox girl with massive fluffy fennec ears and blonde fluffy hair long hair blue eyes wearing a red scarf a pink sweater and blue jeans\n\nstanding in a beautiful forest with mountains\n\n"
]
},
{
"id": 51,
"type": "ControlNetApplyAdvanced",
"pos": [470, 60],
"size": [315, 186],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 101
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 102
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 100
},
{
"name": "image",
"type": "IMAGE",
"link": 99
},
{
"name": "vae",
"type": "VAE",
"shape": 7,
"link": 105
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [103],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [104],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "ControlNetApplyAdvanced"
},
"widgets_values": [0.66, 0, 1]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-576, 64],
"size": [499.99, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [14],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [65],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8, 105],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd3.5_large_fp8_scaled.safetensors"]
},
{
"id": 46,
"type": "ControlNetLoader",
"pos": [-128, 320],
"size": [460.34, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"shape": 3,
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["sd3.5_large_controlnet_canny.safetensors"]
},
{
"id": 52,
"type": "MarkdownNote",
"pos": [-570, 210],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sd3/#sd35-controlnets)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[8, 4, 2, 8, 1, "VAE"],
[13, 8, 0, 9, 0, "IMAGE"],
[14, 4, 0, 3, 0, "MODEL"],
[63, 3, 0, 8, 0, "LATENT"],
[65, 4, 1, 6, 0, "CLIP"],
[66, 33, 0, 3, 3, "LATENT"],
[91, 45, 0, 48, 0, "IMAGE"],
[92, 48, 0, 47, 0, "IMAGE"],
[93, 47, 0, 49, 0, "IMAGE"],
[98, 6, 0, 50, 0, "CONDITIONING"],
[99, 47, 0, 51, 3, "IMAGE"],
[100, 46, 0, 51, 2, "CONTROL_NET"],
[101, 6, 0, 51, 0, "CONDITIONING"],
[102, 50, 0, 51, 1, "CONDITIONING"],
[103, 51, 0, 3, 1, "CONDITIONING"],
[104, 51, 1, 3, 2, "CONDITIONING"],
[105, 4, 2, 51, 4, "VAE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.91,
"offset": [686.52, 188.52]
}
},
"version": 0.4,
"models": [
{
"name": "sd3.5_large_controlnet_canny.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets/resolve/main/sd3.5_large_controlnet_canny.safetensors?download=true",
"directory": "controlnet"
},
{
"name": "sd3.5_large_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-3.5-fp8/resolve/main/sd3.5_large_fp8_scaled.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,278 +0,0 @@
{
"last_node_id": 54,
"last_link_id": 102,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1200, 96],
"size": [210, 46],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 53,
"slot_index": 1
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1440, 96],
"size": [952.51, 1007.93],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 51,
"slot_index": 0
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 40,
"type": "CLIPTextEncode",
"pos": [384, 336],
"size": [432, 192],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 102
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [80],
"slot_index": 0
}
],
"title": "Negative Prompt",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 53,
"type": "EmptySD3LatentImage",
"pos": [480, 576],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-48, 96],
"size": [384.76, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [99],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [101, 102],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [53],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd3.5_large_fp8_scaled.safetensors"]
},
{
"id": 16,
"type": "CLIPTextEncode",
"pos": [384, 96],
"size": [432, 192],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 101
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [21],
"slot_index": 0
}
],
"title": "Positive Prompt",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"a bottle with a pink and red galaxy inside it on top of a wooden table on a table in the middle of a modern kitchen with a window to the outdoors mountain range bright sun clouds forest"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 3,
"type": "KSampler",
"pos": [864, 96],
"size": [315, 262],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 99,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 21
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 80
},
{
"name": "latent_image",
"type": "LATENT",
"link": 100
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
585483408983215,
"randomize",
20,
4.01,
"euler",
"sgm_uniform",
1
]
},
{
"id": 54,
"type": "MarkdownNote",
"pos": [-45, 240],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sd3/#sd35)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[21, 16, 0, 3, 1, "CONDITIONING"],
[51, 8, 0, 9, 0, "IMAGE"],
[53, 4, 2, 8, 1, "VAE"],
[80, 40, 0, 3, 2, "CONDITIONING"],
[99, 4, 0, 3, 0, "MODEL"],
[100, 53, 0, 3, 3, "LATENT"],
[101, 4, 1, 16, 0, "CLIP"],
[102, 4, 1, 40, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.14,
"offset": [93.35, -1.71]
}
},
"version": 0.4,
"models": [
{
"name": "sd3.5_large_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-3.5-fp8/resolve/main/sd3.5_large_fp8_scaled.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,728 +0,0 @@
{
"last_node_id": 49,
"last_link_id": 44,
"nodes": [
{
"id": 36,
"type": "Note",
"pos": [-74, -470],
"size": [315.7, 147.96],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Load Checkpoint BASE",
"properties": {
"text": ""
},
"widgets_values": [
"This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 37,
"type": "Note",
"pos": [610, -460],
"size": [330, 140],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Load Checkpoint REFINER",
"properties": {
"text": ""
},
"widgets_values": [
"This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations."
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 40,
"type": "Note",
"pos": [1325, 234],
"size": [451.5, 424.42],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - KSampler ADVANCED General Information",
"properties": {
"text": ""
},
"widgets_values": [
"Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. 
If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)."
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [544.5, 651.12],
"size": [300, 110],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [27],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 1024, 1],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 17,
"type": "VAEDecode",
"pos": [2220.77, 129.6],
"size": [200, 50],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 25
},
{
"name": "vae",
"type": "VAE",
"link": 34
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [28],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": [],
"color": "#332922",
"bgcolor": "#593930"
},
{
"id": 41,
"type": "Note",
"pos": [2160.77, 229.6],
"size": [320, 120],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - VAE Decoder",
"properties": {
"text": ""
},
"widgets_values": [
"This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG."
],
"color": "#332922",
"bgcolor": "#593930"
},
{
"id": 42,
"type": "Note",
"pos": [564.5, 801.12],
"size": [260, 210],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Empty Latent Image",
"properties": {
"text": ""
},
"widgets_values": [
"This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 11,
"type": "KSamplerAdvanced",
"pos": [1800, 130],
"size": [300, 340],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 14,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 23
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 24
},
{
"name": "latent_image",
"type": "LATENT",
"link": 13
},
{
"name": "steps",
"type": "INT",
"widget": {
"name": "steps"
},
"link": 38,
"slot_index": 4
},
{
"name": "start_at_step",
"type": "INT",
"widget": {
"name": "start_at_step"
},
"link": 44
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [25],
"slot_index": 0
}
],
"title": "KSampler (Advanced) - REFINER",
"properties": {
"Node name for S&R": "KSamplerAdvanced"
},
"widgets_values": [
"disable",
0,
"fixed",
25,
8,
"euler",
"normal",
20,
10000,
"disable"
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 12,
"type": "CheckpointLoaderSimple",
"pos": [600, -611],
"size": [350, 100],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [14],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [19, 20],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [34],
"slot_index": 2
}
],
"title": "Load Checkpoint - REFINER",
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_refiner_1.0.safetensors"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-90, -620],
"size": [350, 100],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [10],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [],
"slot_index": 2
}
],
"title": "Load Checkpoint - BASE",
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 47,
"type": "PrimitiveNode",
"pos": [1037.53, 881.61],
"size": [210, 82],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "end_at_step"
},
"links": [43, 44],
"slot_index": 0
}
],
"title": "end_at_step",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [20, "fixed"],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 45,
"type": "PrimitiveNode",
"pos": [1039.53, 734.61],
"size": [210, 82],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "steps"
},
"links": [38, 41]
}
],
"title": "steps",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [25, "fixed"],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 48,
"type": "Note",
"pos": [1036, 1018],
"size": [213.91, 110.17],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"These can be used to control the total sampling steps and the step at which the sampling switches to the refiner."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 10,
"type": "KSamplerAdvanced",
"pos": [1000, 230],
"size": [300, 334],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 10
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 11
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 12
},
{
"name": "latent_image",
"type": "LATENT",
"link": 27
},
{
"name": "steps",
"type": "INT",
"widget": {
"name": "steps"
},
"link": 41,
"slot_index": 4
},
{
"name": "end_at_step",
"type": "INT",
"widget": {
"name": "end_at_step"
},
"link": 43,
"slot_index": 5
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [13],
"slot_index": 0
}
],
"title": "KSampler (Advanced) - BASE",
"properties": {
"Node name for S&R": "KSamplerAdvanced"
},
"widgets_values": [
"enable",
6767725640732,
"randomize",
25,
8,
"euler",
"normal",
0,
20,
"enable"
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 16,
"type": "CLIPTextEncode",
"pos": [1110, -90],
"size": [340, 140],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 15,
"type": "CLIPTextEncode",
"pos": [1110, -270],
"size": [340, 140],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 19
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"daytime scenery sky nature dark blue bottle with a galaxy stars milky way in it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [610, 30],
"size": [320, 160],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [11],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["daytime sky nature dark blue galaxy bottle"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [610, 240],
"size": [320, 150],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 19,
"type": "SaveImage",
"pos": [2600, 130],
"size": [735.55, 823.98],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 28
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"],
"color": "#222",
"bgcolor": "#000"
},
{
"id": 49,
"type": "MarkdownNote",
"pos": [-90, -255],
"size": [225, 60],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdxl/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[3, 4, 1, 6, 0, "CLIP"],
[5, 4, 1, 7, 0, "CLIP"],
[10, 4, 0, 10, 0, "MODEL"],
[11, 6, 0, 10, 1, "CONDITIONING"],
[12, 7, 0, 10, 2, "CONDITIONING"],
[13, 10, 0, 11, 3, "LATENT"],
[14, 12, 0, 11, 0, "MODEL"],
[19, 12, 1, 15, 0, "CLIP"],
[20, 12, 1, 16, 0, "CLIP"],
[23, 15, 0, 11, 1, "CONDITIONING"],
[24, 16, 0, 11, 2, "CONDITIONING"],
[25, 11, 0, 17, 0, "LATENT"],
[27, 5, 0, 10, 3, "LATENT"],
[28, 17, 0, 19, 0, "IMAGE"],
[34, 12, 2, 17, 1, "VAE"],
[38, 45, 0, 11, 4, "INT"],
[41, 45, 0, 10, 4, "INT"],
[43, 47, 0, 10, 5, "INT"],
[44, 47, 0, 11, 5, "INT"]
],
"groups": [
{
"id": 1,
"title": "Base Prompt",
"bounding": [585, -60, 366, 463],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Refiner Prompt",
"bounding": [1095, -360, 376, 429],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Load in BASE SDXL Model",
"bounding": [-105, -705, 369, 399],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Load in REFINER SDXL Model",
"bounding": [585, -705, 391, 400],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 5,
"title": "Empty Latent Image",
"bounding": [525, 570, 339, 443],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 6,
"title": "VAE Decoder",
"bounding": [2145, 45, 360, 350],
"color": "#b06634",
"font_size": 24,
"flags": {}
},
{
"id": 7,
"title": "Step Control",
"bounding": [1005, 630, 284, 524],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.63,
"offset": [1264.03, 812.09]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "sd_xl_refiner_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-refiner-1.0/resolve/main/sd_xl_refiner_1.0.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,490 +0,0 @@
{
"last_node_id": 41,
"last_link_id": 106,
"nodes": [
{
"id": 13,
"type": "CLIPVisionEncode",
"pos": [135, -63],
"size": [253.6, 78],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 101
},
{
"name": "image",
"type": "IMAGE",
"link": 95
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 36,
"type": "CLIPVisionEncode",
"pos": [137, 24],
"size": [253.6, 78],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 102
},
{
"name": "image",
"type": "IMAGE",
"link": 98
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1277, -210],
"size": [210, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 106
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [-130, -295],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 104
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["anime"]
},
{
"id": 34,
"type": "LoadImage",
"pos": [-352, -29],
"size": [435.35, 377.59],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [95],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["mountains.png", "image"]
},
{
"id": 38,
"type": "LoadImage",
"pos": [-341, 412],
"size": [435.35, 377.59],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [98],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sunset.png", "image"]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [425, -18],
"size": [425.28, 180.61],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 105
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"]
},
{
"id": 40,
"type": "CheckpointLoaderSimple",
"pos": [-761, -275],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [103],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [104, 105],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [106],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"]
},
{
"id": 39,
"type": "CLIPVisionLoader",
"pos": [-760, -120],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [101, 102],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionLoader"
},
"widgets_values": ["clip_vision_g.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1542, -209],
"size": [635.19, 692.82],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [915, -218],
"size": [315, 262],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 103
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 97
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
133632471276133,
"randomize",
26,
8,
"dpmpp_3m_sde_gpu",
"exponential",
1
]
},
{
"id": 19,
"type": "unCLIPConditioning",
"pos": [347, -207],
"size": [262, 102],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 23
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 24
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [96],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "unCLIPConditioning"
},
"widgets_values": [0.75, 0]
},
{
"id": 37,
"type": "unCLIPConditioning",
"pos": [626, -205],
"size": [262, 102],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 96
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 100
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [97],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "unCLIPConditioning"
},
"widgets_values": [0.75, 0]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [534, 214],
"size": [315, 106],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 41,
"type": "MarkdownNote",
"pos": [-750, -15],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdxl/#revision)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[23, 6, 0, 19, 0, "CONDITIONING"],
[24, 13, 0, 19, 1, "CLIP_VISION_OUTPUT"],
[95, 34, 0, 13, 1, "IMAGE"],
[96, 19, 0, 37, 0, "CONDITIONING"],
[97, 37, 0, 3, 1, "CONDITIONING"],
[98, 38, 0, 36, 1, "IMAGE"],
[100, 36, 0, 37, 1, "CLIP_VISION_OUTPUT"],
[101, 39, 0, 13, 0, "CLIP_VISION"],
[102, 39, 0, 36, 0, "CLIP_VISION"],
[103, 40, 0, 3, 0, "MODEL"],
[104, 40, 1, 6, 0, "CLIP"],
[105, 40, 1, 7, 0, "CLIP"],
[106, 40, 2, 8, 1, "VAE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [962.72, 417.65]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "clip_vision_g.safetensors",
"url": "https://huggingface.co/comfyanonymous/clip_vision_g/resolve/main/clip_vision_g.safetensors?download=true",
"directory": "clip_vision"
}
]
}

View File

@@ -1,494 +0,0 @@
{
"last_node_id": 43,
"last_link_id": 111,
"nodes": [
{
"id": 13,
"type": "CLIPVisionEncode",
"pos": [135, -63],
"size": [253.6, 78],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 101
},
{
"name": "image",
"type": "IMAGE",
"link": 95
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 36,
"type": "CLIPVisionEncode",
"pos": [137, 24],
"size": [253.6, 78],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 102
},
{
"name": "image",
"type": "IMAGE",
"link": 98
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1277, -210],
"size": [210, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 106
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 34,
"type": "LoadImage",
"pos": [-352, -29],
"size": [435.35, 377.59],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [95],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["mountains.png", "image"]
},
{
"id": 38,
"type": "LoadImage",
"pos": [-341, 412],
"size": [435.35, 377.59],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [98],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sunset.png", "image"]
},
{
"id": 40,
"type": "CheckpointLoaderSimple",
"pos": [-761, -275],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [103],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [104],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [106],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"]
},
{
"id": 39,
"type": "CLIPVisionLoader",
"pos": [-760, -120],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [101, 102],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionLoader"
},
"widgets_values": ["clip_vision_g.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1542, -209],
"size": [635.19, 692.82],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [915, -218],
"size": [315, 262],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 103
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 97
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 111
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
133632471276133,
"randomize",
26,
8,
"dpmpp_3m_sde_gpu",
"exponential",
1
]
},
{
"id": 37,
"type": "unCLIPConditioning",
"pos": [626, -205],
"size": [262, 102],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 96
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 100
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [97],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "unCLIPConditioning"
},
"widgets_values": [0.75, 0]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [534, 214],
"size": [315, 106],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 19,
"type": "unCLIPConditioning",
"pos": [347, -207],
"size": [262, 102],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 110
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 24
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [96],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "unCLIPConditioning"
},
"widgets_values": [0.75, 0]
},
{
"id": 42,
"type": "ConditioningZeroOut",
"pos": [60, -211],
"size": [211.6, 26],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 109,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [110],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningZeroOut"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [-182, -184],
"size": [422.85, 164.31],
"flags": {
"collapsed": true
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 104
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [109, 111],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""]
},
{
"id": 43,
"type": "MarkdownNote",
"pos": [-750, -15],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdxl/#revision)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[24, 13, 0, 19, 1, "CLIP_VISION_OUTPUT"],
[95, 34, 0, 13, 1, "IMAGE"],
[96, 19, 0, 37, 0, "CONDITIONING"],
[97, 37, 0, 3, 1, "CONDITIONING"],
[98, 38, 0, 36, 1, "IMAGE"],
[100, 36, 0, 37, 1, "CLIP_VISION_OUTPUT"],
[101, 39, 0, 13, 0, "CLIP_VISION"],
[102, 39, 0, 36, 0, "CLIP_VISION"],
[103, 40, 0, 3, 0, "MODEL"],
[104, 40, 1, 6, 0, "CLIP"],
[106, 40, 2, 8, 1, "VAE"],
[109, 6, 0, 42, 0, "CONDITIONING"],
[110, 42, 0, 19, 0, "CONDITIONING"],
[111, 6, 0, 3, 2, "CONDITIONING"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.49,
"offset": [1046.06, 311.39]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "clip_vision_g.safetensors",
"url": "https://huggingface.co/comfyanonymous/clip_vision_g/resolve/main/clip_vision_g.safetensors?download=true",
"directory": "clip_vision"
}
]
}

View File

@@ -1,896 +0,0 @@
{
"last_node_id": 49,
"last_link_id": 44,
"nodes": [
{
"id": 15,
"type": "CLIPTextEncode",
"pos": [1139.11, -121.79],
"size": [210, 54],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 19
},
{
"name": "text",
"type": "STRING",
"widget": {
"name": "text"
},
"link": 21,
"slot_index": 1
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"evening sunset scenery blue sky nature, glass bottle with a galaxy in it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 16,
"type": "CLIPTextEncode",
"pos": [1139.11, -31.79],
"size": [210, 54],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
},
{
"name": "text",
"type": "STRING",
"widget": {
"name": "text"
},
"link": 22,
"slot_index": 1
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 14,
"type": "PrimitiveNode",
"pos": [117.74, 335.18],
"size": [300, 160],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "STRING",
"type": "STRING",
"widget": {
"name": "text"
},
"links": [18, 22],
"slot_index": 0
}
],
"title": "Negative Prompt (Text)",
"properties": {
"Run widget replace on values": false
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 13,
"type": "PrimitiveNode",
"pos": [117.74, 135.18],
"size": [300, 160],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "STRING",
"type": "STRING",
"widget": {
"name": "text"
},
"links": [16, 21],
"slot_index": 0
}
],
"title": "Positive Prompt (Text)",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [
"evening sunset scenery blue sky nature, glass bottle with a galaxy in it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 36,
"type": "Note",
"pos": [-74, -470],
"size": [315.7, 147.96],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Load Checkpoint BASE",
"properties": {
"text": ""
},
"widgets_values": [
"This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 37,
"type": "Note",
"pos": [610, -460],
"size": [330, 140],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Load Checkpoint REFINER",
"properties": {
"text": ""
},
"widgets_values": [
"This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations."
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 38,
"type": "Note",
"pos": [126.74, 534.18],
"size": [284.33, 123.89],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Text Prompts",
"properties": {
"text": ""
},
"widgets_values": [
"These nodes are where you include the text for:\n - what you want in the picture (Positive Prompt, Green)\n - or what you don't want in the picture (Negative Prompt, Red)\n\nThis node type is called a \"PrimitiveNode\" if you are searching for the node type."
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 40,
"type": "Note",
"pos": [1325, 234],
"size": [451.5, 424.42],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - KSampler ADVANCED General Information",
"properties": {
"text": ""
},
"widgets_values": [
"Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. 
If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)."
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [544.5, 651.12],
"size": [300, 110],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [27],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 1024, 1],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 17,
"type": "VAEDecode",
"pos": [2220.77, 129.6],
"size": [200, 50],
"flags": {},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 25
},
{
"name": "vae",
"type": "VAE",
"link": 34
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [28],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": [],
"color": "#332922",
"bgcolor": "#593930"
},
{
"id": 41,
"type": "Note",
"pos": [2160.77, 229.6],
"size": [320, 120],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - VAE Decoder",
"properties": {
"text": ""
},
"widgets_values": [
"This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG."
],
"color": "#332922",
"bgcolor": "#593930"
},
{
"id": 42,
"type": "Note",
"pos": [564.5, 801.12],
"size": [260, 210],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Empty Latent Image",
"properties": {
"text": ""
},
"widgets_values": [
"This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 43,
"type": "Note",
"pos": [1125, 70],
"size": [240, 80],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - CLIP Encode (REFINER)",
"properties": {
"text": ""
},
"widgets_values": [
"These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Refiner)"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [599.5, 269.48],
"size": [210, 54],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
},
{
"name": "text",
"type": "STRING",
"widget": {
"name": "text"
},
"link": 16,
"slot_index": 1
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [11],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"evening sunset scenery blue sky nature, glass bottle with a galaxy in it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [599.5, 359.48],
"size": [210, 54],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
},
{
"name": "text",
"type": "STRING",
"widget": {
"name": "text"
},
"link": 18,
"slot_index": 1
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 39,
"type": "Note",
"pos": [599.5, 449.48],
"size": [210, 80],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - CLIP Encode (BASE)",
"properties": {
"text": ""
},
"widgets_values": [
"These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Base)"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 11,
"type": "KSamplerAdvanced",
"pos": [1800, 130],
"size": [300, 340],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 14,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 23
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 24
},
{
"name": "latent_image",
"type": "LATENT",
"link": 13
},
{
"name": "steps",
"type": "INT",
"widget": {
"name": "steps"
},
"link": 38,
"slot_index": 4
},
{
"name": "start_at_step",
"type": "INT",
"widget": {
"name": "start_at_step"
},
"link": 44
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [25],
"slot_index": 0
}
],
"title": "KSampler (Advanced) - REFINER",
"properties": {
"Node name for S&R": "KSamplerAdvanced"
},
"widgets_values": [
"disable",
0,
"fixed",
25,
8,
"euler",
"normal",
20,
10000,
"disable"
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 12,
"type": "CheckpointLoaderSimple",
"pos": [600, -611],
"size": [350, 100],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [14],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [19, 20],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [34],
"slot_index": 2
}
],
"title": "Load Checkpoint - REFINER",
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_refiner_1.0.safetensors"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-90, -620],
"size": [350, 100],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [10],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [],
"slot_index": 2
}
],
"title": "Load Checkpoint - BASE",
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 19,
"type": "SaveImage",
"pos": [2600, 130],
"size": [565.77, 596.38],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 28
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"],
"color": "#222",
"bgcolor": "#000"
},
{
"id": 47,
"type": "PrimitiveNode",
"pos": [1037.53, 881.61],
"size": [210, 82],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "end_at_step"
},
"links": [43, 44],
"slot_index": 0
}
],
"title": "end_at_step",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [20, "fixed"],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 45,
"type": "PrimitiveNode",
"pos": [1039.53, 734.61],
"size": [210, 82],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "steps"
},
"links": [38, 41]
}
],
"title": "steps",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [25, "fixed"],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 48,
"type": "Note",
"pos": [1036, 1018],
"size": [213.91, 110.17],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"These can be used to control the total sampling steps and the step at which the sampling switches to the refiner."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 10,
"type": "KSamplerAdvanced",
"pos": [1000, 230],
"size": [300, 334],
"flags": {},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 10
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 11
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 12
},
{
"name": "latent_image",
"type": "LATENT",
"link": 27
},
{
"name": "steps",
"type": "INT",
"widget": {
"name": "steps"
},
"link": 41,
"slot_index": 4
},
{
"name": "end_at_step",
"type": "INT",
"widget": {
"name": "end_at_step"
},
"link": 43,
"slot_index": 5
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [13],
"slot_index": 0
}
],
"title": "KSampler (Advanced) - BASE",
"properties": {
"Node name for S&R": "KSamplerAdvanced"
},
"widgets_values": [
"enable",
721897303308196,
"randomize",
25,
8,
"euler",
"normal",
0,
20,
"enable"
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 49,
"type": "MarkdownNote",
"pos": [-105, -255],
"size": [225, 60],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdxl/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[3, 4, 1, 6, 0, "CLIP"],
[5, 4, 1, 7, 0, "CLIP"],
[10, 4, 0, 10, 0, "MODEL"],
[11, 6, 0, 10, 1, "CONDITIONING"],
[12, 7, 0, 10, 2, "CONDITIONING"],
[13, 10, 0, 11, 3, "LATENT"],
[14, 12, 0, 11, 0, "MODEL"],
[16, 13, 0, 6, 1, "STRING"],
[18, 14, 0, 7, 1, "STRING"],
[19, 12, 1, 15, 0, "CLIP"],
[20, 12, 1, 16, 0, "CLIP"],
[21, 13, 0, 15, 1, "STRING"],
[22, 14, 0, 16, 1, "STRING"],
[23, 15, 0, 11, 1, "CONDITIONING"],
[24, 16, 0, 11, 2, "CONDITIONING"],
[25, 11, 0, 17, 0, "LATENT"],
[27, 5, 0, 10, 3, "LATENT"],
[28, 17, 0, 19, 0, "IMAGE"],
[34, 12, 2, 17, 1, "VAE"],
[38, 45, 0, 11, 4, "INT"],
[41, 45, 0, 10, 4, "INT"],
[43, 47, 0, 10, 5, "INT"],
[44, 47, 0, 11, 5, "INT"]
],
"groups": [
{
"id": 1,
"title": "Base Prompt",
"bounding": [585, 195, 252, 361],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Refiner Prompt",
"bounding": [1095, -195, 282, 372],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Text Prompts",
"bounding": [105, 45, 339, 622],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Load in BASE SDXL Model",
"bounding": [-105, -705, 369, 399],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 5,
"title": "Load in REFINER SDXL Model",
"bounding": [585, -705, 391, 400],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 6,
"title": "Empty Latent Image",
"bounding": [525, 570, 339, 443],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 7,
"title": "VAE Decoder",
"bounding": [2145, 45, 360, 350],
"color": "#b06634",
"font_size": 24,
"flags": {}
},
{
"id": 8,
"title": "Step Control",
"bounding": [1005, 630, 284, 524],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.78,
"offset": [685.2, 1020.68]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "sd_xl_refiner_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-refiner-1.0/resolve/main/sd_xl_refiner_1.0.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,372 +0,0 @@
{
"last_node_id": 28,
"last_link_id": 54,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [352, 176],
"size": [425.28, 180.61],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 39
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [20],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"]
},
{
"id": 20,
"type": "CheckpointLoaderSimple",
"pos": [-17, -70],
"size": [343.7, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [41, 45],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [38, 39],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [40],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_turbo_1.0_fp16.safetensors"]
},
{
"id": 14,
"type": "KSamplerSelect",
"pos": [452, -144],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"shape": 3,
"links": [18]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler_ancestral"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [462, 398],
"size": [315, 106],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1183, -66],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 28
},
{
"name": "vae",
"type": "VAE",
"link": 40,
"slot_index": 1
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [53, 54],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 25,
"type": "PreviewImage",
"pos": [1213, 93],
"size": [501.7, 541.92],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 53
}
],
"outputs": [],
"properties": {
"Node name for S&R": "PreviewImage"
},
"widgets_values": []
},
{
"id": 22,
"type": "SDTurboScheduler",
"pos": [452, -248],
"size": [315, 82],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 45,
"slot_index": 0
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"shape": 3,
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "SDTurboScheduler"
},
"widgets_values": [1, 1]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [351, -45],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 38,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [19],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"beautiful landscape scenery glass bottle with a galaxy inside cute fennec fox snow HDR sunset"
]
},
{
"id": 27,
"type": "SaveImage",
"pos": [1843, -154],
"size": [466.79, 516.83],
"flags": {},
"order": 10,
"mode": 2,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 54
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 13,
"type": "SamplerCustom",
"pos": [800, -66],
"size": [355.2, 230],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 41,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 19,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 20
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 18,
"slot_index": 3
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 49,
"slot_index": 4
},
{
"name": "latent_image",
"type": "LATENT",
"link": 23,
"slot_index": 5
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"shape": 3,
"links": [28],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustom"
},
"widgets_values": [true, 0, "fixed", 1]
},
{
"id": 28,
"type": "MarkdownNote",
"pos": [-15, 90],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdturbo/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[18, 14, 0, 13, 3, "SAMPLER"],
[19, 6, 0, 13, 1, "CONDITIONING"],
[20, 7, 0, 13, 2, "CONDITIONING"],
[23, 5, 0, 13, 5, "LATENT"],
[28, 13, 0, 8, 0, "LATENT"],
[38, 20, 1, 6, 0, "CLIP"],
[39, 20, 1, 7, 0, "CLIP"],
[40, 20, 2, 8, 1, "VAE"],
[41, 20, 0, 13, 0, "MODEL"],
[45, 20, 0, 22, 0, "MODEL"],
[49, 22, 0, 13, 4, "SIGMAS"],
[53, 8, 0, 25, 0, "IMAGE"],
[54, 8, 0, 27, 0, "IMAGE"]
],
"groups": [
{
"id": 1,
"title": "Unmute (CTRL-M) if you want to save images.",
"bounding": [1815, -255, 536, 676],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 1.02,
"offset": [311.24, 325.56]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_turbo_1.0_fp16.safetensors",
"url": "https://huggingface.co/stabilityai/sdxl-turbo/resolve/main/sd_xl_turbo_1.0_fp16.safetensors",
"directory": "checkpoints"
}
]
}

View File

@@ -1,302 +0,0 @@
{
"last_node_id": 18,
"last_link_id": 26,
"nodes": [
{
"id": 3,
"type": "KSampler",
"pos": [864, 96],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 18
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 12,
"slot_index": 3
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
840755638734093,
"randomize",
50,
4.98,
"dpmpp_3m_sde_gpu",
"exponential",
1
]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [0, 240],
"size": [336, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [18],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [14],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["stable_audio_open_1.0.safetensors"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 96],
"size": [432, 144],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 25
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["heaven church electronic dance music"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [384, 288],
"size": [432, 144],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 26
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 10,
"type": "CLIPLoader",
"pos": [0, 96],
"size": [335.65, 82],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [25, 26],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPLoader"
},
"widgets_values": ["t5_base.safetensors", "stable_audio", "default"]
},
{
"id": 11,
"type": "EmptyLatentAudio",
"pos": [576, 480],
"size": [240, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [12]
}
],
"properties": {
"Node name for S&R": "EmptyLatentAudio"
},
"widgets_values": [47.6, 1]
},
{
"id": 12,
"type": "VAEDecodeAudio",
"pos": [1200, 96],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 13
},
{
"name": "vae",
"type": "VAE",
"link": 14,
"slot_index": 1
}
],
"outputs": [
{
"name": "AUDIO",
"type": "AUDIO",
"shape": 3,
"links": [15],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecodeAudio"
},
"widgets_values": []
},
{
"id": 13,
"type": "SaveAudio",
"pos": [1440, 96],
"size": [355.22, 100],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "audio",
"type": "AUDIO",
"link": 15
}
],
"outputs": [],
"properties": {
"Node name for S&R": "SaveAudio"
},
"widgets_values": ["audio/ComfyUI", ""]
},
{
"id": 18,
"type": "MarkdownNote",
"pos": [15, 390],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/audio/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[12, 11, 0, 3, 3, "LATENT"],
[13, 3, 0, 12, 0, "LATENT"],
[14, 4, 2, 12, 1, "VAE"],
[15, 12, 0, 13, 0, "AUDIO"],
[18, 4, 0, 3, 0, "MODEL"],
[25, 10, 0, 6, 0, "CLIP"],
[26, 10, 0, 7, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1,
"offset": [201.78, 380.0]
}
},
"version": 0.4,
"models": [
{
"name": "t5_base.safetensors",
"url": "https://huggingface.co/google-t5/t5-base/resolve/main/model.safetensors",
"directory": "text_encoders"
},
{
"name": "stable_audio_open_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-audio-open-1.0/resolve/main/model.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,273 +0,0 @@
{
"last_node_id": 27,
"last_link_id": 55,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1207.8, 375.7],
"size": [210, 46],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 26
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [868, 376],
"size": [315, 262],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 42
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 53
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 54
},
{
"name": "latent_image",
"type": "LATENT",
"link": 55
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
237514639057560,
"fixed",
20,
5,
"euler",
"sgm_uniform",
1
]
},
{
"id": 25,
"type": "SaveImage",
"pos": [1459, 378],
"size": [262.29, 308.65],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 49
}
],
"outputs": [],
"properties": {},
"widgets_values": ["3d/ComfyUI"]
},
{
"id": 23,
"type": "LoadImage",
"pos": [175, 438],
"size": [316.52, 405.71],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [51],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["hypernetwork_example_output.png", "image"]
},
{
"id": 26,
"type": "StableZero123_Conditioning",
"pos": [514, 394],
"size": [315, 194],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 50
},
{
"name": "init_image",
"type": "IMAGE",
"link": 51
},
{
"name": "vae",
"type": "VAE",
"link": 52
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"shape": 3,
"links": [53],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"shape": 3,
"links": [54],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"shape": 3,
"links": [55],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "StableZero123_Conditioning"
},
"widgets_values": [256, 256, 1, 10, 142]
},
{
"id": 15,
"type": "ImageOnlyCheckpointLoader",
"pos": [89, 290],
"size": [369.6, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [42],
"slot_index": 0
},
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [50],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [26, 52],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "ImageOnlyCheckpointLoader"
},
"widgets_values": ["stable_zero123.ckpt"]
},
{
"id": 27,
"type": "MarkdownNote",
"pos": [-75, 450],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/3d/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[26, 15, 2, 8, 1, "VAE"],
[42, 15, 0, 3, 0, "MODEL"],
[49, 8, 0, 25, 0, "IMAGE"],
[50, 15, 1, 26, 0, "CLIP_VISION"],
[51, 23, 0, 26, 1, "IMAGE"],
[52, 15, 2, 26, 2, "VAE"],
[53, 26, 0, 3, 1, "CONDITIONING"],
[54, 26, 1, 3, 2, "CONDITIONING"],
[55, 26, 2, 3, 3, "LATENT"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.75,
"offset": [439.73, 40.67]
}
},
"version": 0.4,
"models": [
{
"name": "stable_zero123.ckpt",
"url": "https://huggingface.co/stabilityai/stable-zero123/resolve/main/stable_zero123.ckpt",
"directory": "checkpoints"
}
]
}

View File

@@ -1,535 +0,0 @@
{
"last_node_id": 23,
"last_link_id": 40,
"nodes": [
{
"id": 3,
"type": "KSampler",
"pos": [1843.74, 476.56],
"size": [315, 262],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 39
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 17
},
{
"name": "latent_image",
"type": "LATENT",
"link": 18
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
237514639057514,
"randomize",
20,
2.5,
"euler",
"karras",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [2183.74, 476.56],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 26
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "SaveAnimatedWEBP",
"pos": [1654, 829],
"size": [741.67, 564.59],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 10
}
],
"outputs": [],
"properties": {
"Node name for S&R": "SaveAnimatedWEBP"
},
"widgets_values": ["ComfyUI", 10, false, 85, "default"]
},
{
"id": 12,
"type": "SVD_img2vid_Conditioning",
"pos": [1463.74, 496.56],
"size": [315, 218],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 24
},
{
"name": "init_image",
"type": "IMAGE",
"link": 35,
"slot_index": 1
},
{
"name": "vae",
"type": "VAE",
"link": 25
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"shape": 3,
"links": [40],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"shape": 3,
"links": [17],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"shape": 3,
"links": [18],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "SVD_img2vid_Conditioning"
},
"widgets_values": [1024, 576, 25, 127, 6, 0]
},
{
"id": 14,
"type": "VideoLinearCFGGuidance",
"pos": [1463.74, 366.56],
"size": [315, 58],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 23
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [39],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VideoLinearCFGGuidance"
},
"widgets_values": [1]
},
{
"id": 15,
"type": "ImageOnlyCheckpointLoader",
"pos": [1050, 320],
"size": [369.6, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [23],
"slot_index": 0
},
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [24],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [25, 26],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "ImageOnlyCheckpointLoader"
},
"widgets_values": ["svd_xt.safetensors"]
},
{
"id": 16,
"type": "CheckpointLoaderSimple",
"pos": [0, 510],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [28],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [29, 31],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [34]
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"]
},
{
"id": 17,
"type": "KSampler",
"pos": [802.4, 566.4],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 28
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 30
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 32
},
{
"name": "latent_image",
"type": "LATENT",
"link": 37,
"slot_index": 3
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [33],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
144698910769133,
"randomize",
15,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 18,
"type": "CLIPTextEncode",
"pos": [342.4, 516.4],
"size": [390, 130],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 29
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [30],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"photograph beautiful scenery nature mountains alps river rapids snow sky cumulus clouds"
]
},
{
"id": 19,
"type": "CLIPTextEncode",
"pos": [342.4, 696.4],
"size": [390, 130],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 31
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [32],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"]
},
{
"id": 20,
"type": "VAEDecode",
"pos": [1172.4, 566.4],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 33
},
{
"name": "vae",
"type": "VAE",
"link": 34,
"slot_index": 1
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [35, 36],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 21,
"type": "PreviewImage",
"pos": [1152.4, 656.4],
"size": [275.95, 246],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 36
}
],
"outputs": [],
"properties": {
"Node name for S&R": "PreviewImage"
},
"widgets_values": []
},
{
"id": 22,
"type": "EmptyLatentImage",
"pos": [422.4, 866.4],
"size": [310, 110],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [37]
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 576, 1]
},
{
"id": 23,
"type": "MarkdownNote",
"pos": [0, 660],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/video/#image-to-video)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[10, 8, 0, 10, 0, "IMAGE"],
[17, 12, 1, 3, 2, "CONDITIONING"],
[18, 12, 2, 3, 3, "LATENT"],
[23, 15, 0, 14, 0, "MODEL"],
[24, 15, 1, 12, 0, "CLIP_VISION"],
[25, 15, 2, 12, 2, "VAE"],
[26, 15, 2, 8, 1, "VAE"],
[28, 16, 0, 17, 0, "MODEL"],
[29, 16, 1, 18, 0, "CLIP"],
[30, 18, 0, 17, 1, "CONDITIONING"],
[31, 16, 1, 19, 0, "CLIP"],
[32, 19, 0, 17, 2, "CONDITIONING"],
[33, 17, 0, 20, 0, "LATENT"],
[34, 16, 2, 20, 1, "VAE"],
[35, 20, 0, 12, 1, "IMAGE"],
[36, 20, 0, 21, 0, "IMAGE"],
[37, 22, 0, 17, 3, "LATENT"],
[39, 14, 0, 3, 0, "MODEL"],
[40, 12, 0, 3, 1, "CONDITIONING"]
],
"groups": [
{
"id": 1,
"title": "Image to Video",
"bounding": [1455, 300, 954, 478],
"color": "#8A8",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Text to Image",
"bounding": [330, 435, 1106, 544],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 1.13,
"offset": [502.97, -29.59]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "svd_xt.safetensors",
"url": "https://huggingface.co/stabilityai/stable-video-diffusion-img2vid-xt/resolve/main/svd_xt.safetensors?download=true",
"directory": "checkpoints"
}
]
}

Some files were not shown because too many files have changed in this diff Show More