[CI] Include workflow templates at build time (#2775)

Author: Chenlei Hu
Date: 2025-03-02 15:22:18 -05:00
Committed by: GitHub
Parent: 6303992f4e
Commit: 9431c955a6
110 changed files with 59 additions and 22016 deletions


@@ -29,6 +29,7 @@ jobs:
           SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
         run: |
           npm ci
+          npm run fetch-templates
           npm run build
           npm run zipdist
       - name: Upload dist artifact


@@ -39,6 +39,7 @@ jobs:
       - name: Build ComfyUI_frontend
         run: |
           npm ci
+          npm run fetch-templates
           npm run build
         working-directory: ComfyUI_frontend

.gitignore (7 changes)

@@ -48,3 +48,10 @@ dist.zip
 # Generated JSON Schemas
 /schemas/
+
+# Workflow templates assets
+# Hosted on https://github.com/Comfy-Org/workflow_templates
+/public/templates/
+
+# Temporary repository directory
+templates_repo/

package-lock.json (generated, 22 changes)

@@ -50,6 +50,7 @@
         "@pinia/testing": "^0.1.5",
         "@playwright/test": "^1.44.1",
         "@trivago/prettier-plugin-sort-imports": "^5.2.0",
+        "@types/fs-extra": "^11.0.4",
         "@types/lodash": "^4.17.6",
         "@types/node": "^20.14.8",
         "@types/three": "^0.169.0",
@@ -2825,6 +2826,17 @@
       "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==",
       "dev": true
     },
+    "node_modules/@types/fs-extra": {
+      "version": "11.0.4",
+      "resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-11.0.4.tgz",
+      "integrity": "sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/jsonfile": "*",
+        "@types/node": "*"
+      }
+    },
     "node_modules/@types/json-schema": {
       "version": "7.0.15",
       "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz",
@@ -2832,6 +2844,16 @@
       "dev": true,
       "license": "MIT"
     },
+    "node_modules/@types/jsonfile": {
+      "version": "6.1.4",
+      "resolved": "https://registry.npmjs.org/@types/jsonfile/-/jsonfile-6.1.4.tgz",
+      "integrity": "sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/node": "*"
+      }
+    },
     "node_modules/@types/linkify-it": {
       "version": "5.0.0",
       "resolved": "https://registry.npmjs.org/@types/linkify-it/-/linkify-it-5.0.0.tgz",


@@ -28,7 +28,8 @@
     "lint:fix": "eslint src --fix",
     "locale": "lobe-i18n locale",
     "collect-i18n": "playwright test --config=playwright.i18n.config.ts",
-    "json-schema": "tsx scripts/generate-json-schema.ts"
+    "json-schema": "tsx scripts/generate-json-schema.ts",
+    "fetch-templates": "tsx scripts/fetch-templates.ts"
   },
   "devDependencies": {
     "@eslint/js": "^9.8.0",
@@ -37,6 +38,7 @@
     "@pinia/testing": "^0.1.5",
     "@playwright/test": "^1.44.1",
     "@trivago/prettier-plugin-sort-imports": "^5.2.0",
+    "@types/fs-extra": "^11.0.4",
     "@types/lodash": "^4.17.6",
     "@types/node": "^20.14.8",
     "@types/three": "^0.169.0",

Binary image file not shown (deleted, 111 KiB)

Binary image file not shown (deleted, 9.5 KiB)


@@ -1,673 +0,0 @@
{
"last_node_id": 40,
"last_link_id": 38,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 14
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "ControlNetApply",
"pos": [340, 267],
"size": [317.4, 98],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 10
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 11
},
{
"name": "image",
"type": "IMAGE",
"link": 12
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [1]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1280, 704, 1]
},
{
"id": 17,
"type": "VAEDecode",
"pos": [2527, 369],
"size": [210, 46],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 18
},
{
"name": "vae",
"type": "VAE",
"link": 20
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [19],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [-8, 607],
"size": [425.28, 180.61],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 35
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 38
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 13
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 16],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1071823866653712,
"randomize",
10,
8,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 35,
"type": "CLIPTextEncode",
"pos": [1310, -72],
"size": [425.28, 180.61],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 33
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [28],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [-223, -93],
"size": [422.85, 164.31],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 36
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo) girl (flat chest:0.9), (fennec ears:1.0)\u00a0 (fox ears:1.0), (messy hair), eyes, standing (school uniform sweater) sky clouds nature national park beautiful winter snow (scenery HDR landscape)\n(sunset)\n"
]
},
{
"id": 12,
"type": "LoadImage",
"pos": [-280, 287],
"size": [365, 314],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [12],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["pose_worship.png", "image"]
},
{
"id": 13,
"type": "VAELoader",
"pos": [1098, 599],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [14, 20],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["kl-f8-anime2.ckpt"]
},
{
"id": 15,
"type": "CLIPTextEncode",
"pos": [1321, -395],
"size": [400, 200],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 32
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [21],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"girl (flat chest:0.9), (fennec ears:0.8)\u00a0 (fox ears:0.8), (messy hair), (highlights), (realistic starry eyes pupil:1.1), standing (school uniform sweater)\nsky clouds nature national park beautiful winter snow scenery HDR landscape\n\n(sunset)\n\n"
]
},
{
"id": 36,
"type": "CheckpointLoaderSimple",
"pos": [570, -206],
"size": [315, 98],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [29],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [34],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["AOM3A3.safetensors"]
},
{
"id": 37,
"type": "CLIPSetLastLayer",
"pos": [933, -183],
"size": [315, 58],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 34,
"slot_index": 0
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [32, 33],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 38,
"type": "CLIPSetLastLayer",
"pos": [-733, 375],
"size": [315, 58],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 37
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [35, 36],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 39,
"type": "CheckpointLoaderSimple",
"pos": [-1100, 302],
"size": [315, 98],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [38],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [37],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 14,
"type": "LatentUpscale",
"pos": [1486, 494],
"size": [315, 130],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 16
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [17],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1920, 1088, "disabled"]
},
{
"id": 18,
"type": "SaveImage",
"pos": [2769, 370],
"size": [357.86, 262.24],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 19
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 16,
"type": "KSampler",
"pos": [2011, 248],
"size": [315, 262],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 29
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 21
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 28
},
{
"name": "latent_image",
"type": "LATENT",
"link": 17
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [18],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
284006177305237,
"randomize",
8,
5,
"dpmpp_sde",
"simple",
0.52
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [353.07, 252.57],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 11,
"type": "ControlNetLoader",
"pos": [-250, 151],
"size": [450.9, 58],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [11],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["control_v11p_sd15_openpose_fp16.safetensors"]
},
{
"id": 40,
"type": "MarkdownNote",
"pos": [-1095, 480],
"size": [225, 60],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#2-pass-pose-worship)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[10, 6, 0, 10, 0, "CONDITIONING"],
[11, 11, 0, 10, 1, "CONTROL_NET"],
[12, 12, 0, 10, 2, "IMAGE"],
[13, 10, 0, 3, 1, "CONDITIONING"],
[14, 13, 0, 8, 1, "VAE"],
[16, 3, 0, 14, 0, "LATENT"],
[17, 14, 0, 16, 3, "LATENT"],
[18, 16, 0, 17, 0, "LATENT"],
[19, 17, 0, 18, 0, "IMAGE"],
[20, 13, 0, 17, 1, "VAE"],
[21, 15, 0, 16, 1, "CONDITIONING"],
[28, 35, 0, 16, 2, "CONDITIONING"],
[29, 36, 0, 16, 0, "MODEL"],
[32, 37, 0, 15, 0, "CLIP"],
[33, 37, 0, 35, 0, "CLIP"],
[34, 36, 1, 37, 0, "CLIP"],
[35, 38, 0, 7, 0, "CLIP"],
[36, 38, 0, 6, 0, "CLIP"],
[37, 39, 1, 38, 0, "CLIP"],
[38, 39, 0, 3, 0, "MODEL"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.88,
"offset": [1252.62, 517.93]
}
},
"version": 0.4,
"models": [
{
"name": "control_v11p_sd15_openpose_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_openpose_fp16.safetensors",
"directory": "controlnet"
}
]
}

Binary image file not shown (deleted, 196 KiB)


@@ -1,966 +0,0 @@
{
"last_node_id": 48,
"last_link_id": 113,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1320, 302],
"size": [210, 46],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 36
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1575, 350],
"size": [210, 250],
"flags": {},
"order": 26,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 49
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [175, 496],
"size": [425.28, 180.61],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 100
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 12,
"type": "ConditioningCombine",
"pos": [834, -246],
"size": [342.6, 46],
"flags": {
"collapsed": false
},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 63
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 57
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [58],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 20,
"type": "VAELoader",
"pos": [1041, 544],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [36, 51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["vae-ft-mse-840000-ema-pruned.safetensors"]
},
{
"id": 35,
"type": "ConditioningCombine",
"pos": [873, -705],
"size": [342.6, 46],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 61
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [63],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [695, 531],
"size": [315, 106],
"flags": {
"collapsed": false
},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [704, 1280, 1]
},
{
"id": 22,
"type": "LatentUpscale",
"pos": [1412, 79],
"size": [315, 130],
"flags": {},
"order": 25,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 41
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1088, 1920, "disabled"]
},
{
"id": 14,
"type": "CLIPTextEncode",
"pos": [-4, -994],
"size": [400, 200],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 104
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [95],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) (night:1.3) (darkness) sky (black) (stars:1.2) (galaxy:1.2) (space) (universe)"
]
},
{
"id": 18,
"type": "ConditioningSetArea",
"pos": [482, -709],
"size": [312, 154],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 96
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [62],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 320, 1]
},
{
"id": 31,
"type": "VAEDecode",
"pos": [2783.3, -41],
"size": [210, 46],
"flags": {},
"order": 28,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 50
},
{
"name": "vae",
"type": "VAE",
"link": 51
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [87],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 32,
"type": "SaveImage",
"pos": [3012.3, -42],
"size": [315, 250],
"flags": {},
"order": 29,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 87
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 24,
"type": "KSampler",
"pos": [2421.3, -389],
"size": [315, 262],
"flags": {},
"order": 27,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 108
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 46
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 47
},
{
"name": "latent_image",
"type": "LATENT",
"link": 42
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [50],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1122440447966177,
"randomize",
14,
7,
"dpmpp_2m",
"simple",
0.5
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [152, 265],
"size": [422.85, 164.31],
"flags": {
"collapsed": false
},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 99
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [93],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(masterpiece) (best quality) beautiful landscape breathtaking amazing view nature photograph forest mountains ocean (sky) national park scenery"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [885, 136],
"size": [315, 262],
"flags": {},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 107
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 54
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 41],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
335608130539327,
"randomize",
13,
8.5,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 11,
"type": "ConditioningSetArea",
"pos": [479, -454],
"size": [314, 154],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 97,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 512, 1]
},
{
"id": 19,
"type": "ConditioningCombine",
"pos": [1180, -151],
"size": [342.6, 46],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 58
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 94
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [54],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 10,
"type": "ConditioningCombine",
"pos": [803, -149],
"size": [342.6, 46],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 40
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 93
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [94],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 15,
"type": "ConditioningSetArea",
"pos": [466, -233],
"size": [299, 154],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 98
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 704, 1]
},
{
"id": 13,
"type": "CLIPTextEncode",
"pos": [-5, -729],
"size": [400, 200],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 103
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [96],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) (evening:1.2) (sky:1.2) (clouds) (colorful) (HDR:1.2) (sunset:1.3)\n"
]
},
{
"id": 17,
"type": "CLIPTextEncode",
"pos": [11, -455],
"size": [400, 200],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 102
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [97],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["(best quality) (daytime:1.2) sky (blue)\n"]
},
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [16, -217],
"size": [400, 200],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 101
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [98],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["(masterpiece) (best quality) morning sky\n\n"]
},
{
"id": 34,
"type": "ConditioningSetArea",
"pos": [476, -932],
"size": [312, 154],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 95,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [61],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 0, 1.2]
},
{
"id": 44,
"type": "CLIPSetLastLayer",
"pos": [-363, 453],
"size": [315, 58],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 106
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [99, 100, 101, 102, 103, 104],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 45,
"type": "CheckpointLoaderSimple",
"pos": [-849, 429],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [107],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [106],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 27,
"type": "CLIPTextEncode",
"pos": [1969.3, -336],
"size": [400, 200],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 113
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [47],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 26,
"type": "CLIPTextEncode",
"pos": [1965, -580],
"size": [400, 200],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 112
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [46],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) beautiful (HDR:1.2) (realistic:1.2) landscape breathtaking amazing view nature scenery photograph forest mountains ocean daytime night evening morning, (sky:1.2)\n"
]
},
{
"id": 47,
"type": "CLIPSetLastLayer",
"pos": [1569, -403],
"size": [315, 58],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 111
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [112, 113],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 46,
"type": "CheckpointLoaderSimple",
"pos": [1217, -496],
"size": [315, 98],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [108],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [111],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["AbyssOrangeMix2_hard.safetensors"]
},
{
"id": 48,
"type": "MarkdownNote",
"pos": [-840, 585],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/area_composition/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[36, 20, 0, 8, 1, "VAE"],
[40, 15, 0, 10, 0, "CONDITIONING"],
[41, 3, 0, 22, 0, "LATENT"],
[42, 22, 0, 24, 3, "LATENT"],
[46, 26, 0, 24, 1, "CONDITIONING"],
[47, 27, 0, 24, 2, "CONDITIONING"],
[49, 8, 0, 9, 0, "IMAGE"],
[50, 24, 0, 31, 0, "LATENT"],
[51, 20, 0, 31, 1, "VAE"],
[54, 19, 0, 3, 1, "CONDITIONING"],
[57, 11, 0, 12, 1, "CONDITIONING"],
[58, 12, 0, 19, 0, "CONDITIONING"],
[61, 34, 0, 35, 0, "CONDITIONING"],
[62, 18, 0, 35, 1, "CONDITIONING"],
[63, 35, 0, 12, 0, "CONDITIONING"],
[87, 31, 0, 32, 0, "IMAGE"],
[93, 6, 0, 10, 1, "CONDITIONING"],
[94, 10, 0, 19, 1, "CONDITIONING"],
[95, 14, 0, 34, 0, "CONDITIONING"],
[96, 13, 0, 18, 0, "CONDITIONING"],
[97, 17, 0, 11, 0, "CONDITIONING"],
[98, 33, 0, 15, 0, "CONDITIONING"],
[99, 44, 0, 6, 0, "CLIP"],
[100, 44, 0, 7, 0, "CLIP"],
[101, 44, 0, 33, 0, "CLIP"],
[102, 44, 0, 17, 0, "CLIP"],
[103, 44, 0, 13, 0, "CLIP"],
[104, 44, 0, 14, 0, "CLIP"],
[106, 45, 1, 44, 0, "CLIP"],
[107, 45, 0, 3, 0, "MODEL"],
[108, 46, 0, 24, 0, "MODEL"],
[111, 46, 1, 47, 0, "CLIP"],
[112, 47, 0, 26, 0, "CLIP"],
[113, 47, 0, 27, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.44,
"offset": [1558.38, 1652.18]
}
},
"version": 0.4,
"models": [
{
"name": "vae-ft-mse-840000-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors?download=true",
"directory": "vae"
}
]
}

Binary image file not shown (deleted, 212 KiB)


@@ -1,967 +0,0 @@
{
"last_node_id": 48,
"last_link_id": 114,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1320, 302],
"size": [210, 46],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 36
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1575, 350],
"size": [210, 58],
"flags": {},
"order": 26,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 49
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [175, 496],
"size": [425.28, 180.61],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 106
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 12,
"type": "ConditioningCombine",
"pos": [834, -246],
"size": [342.6, 46],
"flags": {
"collapsed": false
},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 63
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 57
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [58],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 20,
"type": "VAELoader",
"pos": [1041, 544],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [36, 51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["vae-ft-mse-840000-ema-pruned.safetensors"]
},
{
"id": 35,
"type": "ConditioningCombine",
"pos": [873, -705],
"size": [342.6, 46],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 61
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [63],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [695, 531],
"size": [315, 106],
"flags": {
"collapsed": false
},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [704, 1280, 1]
},
{
"id": 22,
"type": "LatentUpscale",
"pos": [1412, 79],
"size": [315, 130],
"flags": {},
"order": 25,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 41
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1088, 1920, "disabled"]
},
{
"id": 14,
"type": "CLIPTextEncode",
"pos": [-4, -994],
"size": [400, 200],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 110
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [89],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) (night:1.3) (darkness) sky (black) (stars:1.2) (galaxy:1.2) (space) (universe)"
]
},
{
"id": 13,
"type": "CLIPTextEncode",
"pos": [-5, -729],
"size": [400, 200],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 109
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [91],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) (evening:1.2) (sky:1.2) (clouds) (colorful) (HDR:1.2) (sunset:1.3)\n"
]
},
{
"id": 17,
"type": "CLIPTextEncode",
"pos": [11, -455],
"size": [400, 200],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 108
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [90],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["(best quality) (daytime:1.2) sky (blue)\n"]
},
{
"id": 18,
"type": "ConditioningSetArea",
"pos": [482, -709],
"size": [312, 154],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 90
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [62],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 320, 1]
},
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [16, -217],
"size": [400, 200],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 107
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [92],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["(masterpiece) (best quality) morning sky\n\n"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [152, 265],
"size": [422.85, 164.31],
"flags": {
"collapsed": false
},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 105
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [93],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(masterpiece) (best quality) beautiful landscape breathtaking amazing view nature photograph forest mountains ocean (sky) national park scenery"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [885, 136],
"size": [315, 262],
"flags": {},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 104
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 54
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 41],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
823155751257884,
"randomize",
13,
8.5,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 11,
"type": "ConditioningSetArea",
"pos": [479, -454],
"size": [314, 154],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 91,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 512, 1]
},
{
"id": 19,
"type": "ConditioningCombine",
"pos": [1180, -151],
"size": [342.6, 46],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 58
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 94
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [54],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 10,
"type": "ConditioningCombine",
"pos": [803, -149],
"size": [342.6, 46],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 40
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 93
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [94],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 34,
"type": "ConditioningSetArea",
"pos": [476, -932],
"size": [312, 154],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 92,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [61],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 0, 1]
},
{
"id": 15,
"type": "ConditioningSetArea",
"pos": [466, -233],
"size": [299, 154],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 89
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 704, 1.5]
},
{
"id": 44,
"type": "CheckpointLoaderSimple",
"pos": [-703, 444],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [104],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [111],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 46,
"type": "CLIPSetLastLayer",
"pos": [-354, 244],
"size": [315, 58],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 111,
"slot_index": 0
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [105, 106, 107, 108, 109, 110],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 24,
"type": "KSampler",
"pos": [2220, -398],
"size": [315, 262],
"flags": {},
"order": 27,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 95
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 46
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 47
},
{
"name": "latent_image",
"type": "LATENT",
"link": 42
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [50],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
418330692116968,
"randomize",
14,
7,
"dpmpp_2m",
"simple",
0.5
]
},
{
"id": 32,
"type": "SaveImage",
"pos": [2825, -62],
"size": [315, 58],
"flags": {},
"order": 29,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 87
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 31,
"type": "VAEDecode",
"pos": [2590, -61],
"size": [210, 46],
"flags": {},
"order": 28,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 50
},
{
"name": "vae",
"type": "VAE",
"link": 51
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [87],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 26,
"type": "CLIPTextEncode",
"pos": [1781, -571],
"size": [400, 200],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 113
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [46],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) beautiful (HDR:1.2) (realistic:1.2) landscape breathtaking amazing view nature scenery photograph forest mountains ocean daytime night evening morning, (sky:1.2)\n"
]
},
{
"id": 27,
"type": "CLIPTextEncode",
"pos": [1787, -317],
"size": [400, 200],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 114
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [47],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 47,
"type": "CLIPSetLastLayer",
"pos": [1407, -402],
"size": [315, 58],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 112
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [113, 114],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 45,
"type": "CheckpointLoaderSimple",
"pos": [1074, -444],
"size": [315, 98],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [95],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [112],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["AbyssOrangeMix2_hard.safetensors"]
},
{
"id": 48,
"type": "MarkdownNote",
"pos": [-690, 615],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/area_composition/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[36, 20, 0, 8, 1, "VAE"],
[40, 15, 0, 10, 0, "CONDITIONING"],
[41, 3, 0, 22, 0, "LATENT"],
[42, 22, 0, 24, 3, "LATENT"],
[46, 26, 0, 24, 1, "CONDITIONING"],
[47, 27, 0, 24, 2, "CONDITIONING"],
[49, 8, 0, 9, 0, "IMAGE"],
[50, 24, 0, 31, 0, "LATENT"],
[51, 20, 0, 31, 1, "VAE"],
[54, 19, 0, 3, 1, "CONDITIONING"],
[57, 11, 0, 12, 1, "CONDITIONING"],
[58, 12, 0, 19, 0, "CONDITIONING"],
[61, 34, 0, 35, 0, "CONDITIONING"],
[62, 18, 0, 35, 1, "CONDITIONING"],
[63, 35, 0, 12, 0, "CONDITIONING"],
[87, 31, 0, 32, 0, "IMAGE"],
[89, 14, 0, 15, 0, "CONDITIONING"],
[90, 17, 0, 18, 0, "CONDITIONING"],
[91, 13, 0, 11, 0, "CONDITIONING"],
[92, 33, 0, 34, 0, "CONDITIONING"],
[93, 6, 0, 10, 1, "CONDITIONING"],
[94, 10, 0, 19, 1, "CONDITIONING"],
[95, 45, 0, 24, 0, "MODEL"],
[104, 44, 0, 3, 0, "MODEL"],
[105, 46, 0, 6, 0, "CLIP"],
[106, 46, 0, 7, 0, "CLIP"],
[107, 46, 0, 33, 0, "CLIP"],
[108, 46, 0, 17, 0, "CLIP"],
[109, 46, 0, 13, 0, "CLIP"],
[110, 46, 0, 14, 0, "CLIP"],
[111, 44, 1, 46, 0, "CLIP"],
[112, 45, 1, 47, 0, "CLIP"],
[113, 47, 0, 26, 0, "CLIP"],
[114, 47, 0, 27, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.79,
"offset": [1022.96, -230.7]
}
},
"version": 0.4,
"models": [
{
"name": "vae-ft-mse-840000-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors?download=true",
"directory": "vae"
}
]
}

Binary image file not shown (deleted, 207 KiB)


@@ -1,620 +0,0 @@
{
"last_node_id": 50,
"last_link_id": 108,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1320, 302],
"size": [210, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 36
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [175, 496],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 101
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 27,
"type": "CLIPTextEncode",
"pos": [1570, -336],
"size": [400, 200],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 103
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [47],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 22,
"type": "LatentUpscale",
"pos": [1412, 79],
"size": [315, 130],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 41
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1920, 1088, "disabled"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [695, 531],
"size": [315, 106],
"flags": {
"collapsed": false
},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1280, 704, 1]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1556, 303],
"size": [210, 250],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 49
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [156, 269],
"size": [422.85, 164.31],
"flags": {
"collapsed": false
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 102
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [98],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo:1.3) (best quality) (HDR:1.0) colourful, nature wilderness snow mountain peak, (winter:1.2), on landscape mountain in Switzerland alps sunset, aerial view (cityscape:1.3) skyscrapers modern city satellite view, (sunset)\ngirl with fennec ears fox ears, sweater, sitting\n"
]
},
{
"id": 47,
"type": "ConditioningCombine",
"pos": [530, 71],
"size": [342.6, 46],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 97
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 98
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [99],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 45,
"type": "CLIPTextEncode",
"pos": [-88, -224],
"size": [400, 200],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 105,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [93],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo:1.3) (best quality) (HDR:1.0) girl colourful of (flat chest:0.9), (fennec ears:1.0)\u00a0 (fox ears:1.0), blonde twintails medium (messy hair:1.2), (eyes:1.0), sweater, (pink:0.8) , long sleeves, sweatpants (pants), gloves, nature wilderness (sitting:1.3) on snow mountain peak, (:d:0.5) (blush:0.9), (winter:1.2), on landscape mountain in Switzerland alps sunset, comfortable, (spread legs:1.1), aerial view (cityscape:1.3) skyscrapers modern city satellite view, (sunset)"
]
},
{
"id": 31,
"type": "VAEDecode",
"pos": [2419, 10],
"size": [210, 46],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 50
},
{
"name": "vae",
"type": "VAE",
"link": 51
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 46,
"type": "ConditioningSetArea",
"pos": [344, -227],
"size": [317.4, 154],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 93
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [97],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [640, 640, 0, 64, 1]
},
{
"id": 26,
"type": "CLIPTextEncode",
"pos": [1573, -583],
"size": [400, 200],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 104
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [46],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece solo (realistic) (best quality) (HDR:1.0) girl colourful of (flat chest:0.9), (fox ears:0.9), blonde twintails messy hair, (eyes:1.0), sweater, (pink:0.8) , long sleeves, sweatpants pants, gloves, nature wilderness sitting on snow mountain peak aerial view, (:d:0.5) (blush:0.9), (winter:0.9), mountain in Switzerland, comfortable, aerial view (cityscape:1.2) skyscrapers modern city satellite view, (sunset)\n"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [885, 136],
"size": [315, 262],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 106
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 99
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 41],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
830459492315490,
"randomize",
13,
7,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 32,
"type": "SaveImage",
"pos": [2648, -11],
"size": [210, 250],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 100
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 24,
"type": "KSampler",
"pos": [2047, -270],
"size": [315, 262],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 107
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 46
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 47
},
{
"name": "latent_image",
"type": "LATENT",
"link": 42
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [50],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
626842672818096,
"randomize",
7,
5,
"dpmpp_sde",
"simple",
0.52
]
},
{
"id": 20,
"type": "VAELoader",
"pos": [1086, 563],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [36, 51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["vae-ft-mse-840000-ema-pruned.safetensors"]
},
{
"id": 49,
"type": "CLIPSetLastLayer",
"pos": [-227, 630],
"size": [315, 58],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 108
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [101, 102, 103, 104, 105],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 48,
"type": "CheckpointLoaderSimple",
"pos": [-621, 603],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [106, 107],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [108],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 50,
"type": "MarkdownNote",
"pos": [-615, 765],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/area_composition/#increasing-consistency-of-images-with-area-composition)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[36, 20, 0, 8, 1, "VAE"],
[41, 3, 0, 22, 0, "LATENT"],
[42, 22, 0, 24, 3, "LATENT"],
[46, 26, 0, 24, 1, "CONDITIONING"],
[47, 27, 0, 24, 2, "CONDITIONING"],
[49, 8, 0, 9, 0, "IMAGE"],
[50, 24, 0, 31, 0, "LATENT"],
[51, 20, 0, 31, 1, "VAE"],
[93, 45, 0, 46, 0, "CONDITIONING"],
[97, 46, 0, 47, 0, "CONDITIONING"],
[98, 6, 0, 47, 1, "CONDITIONING"],
[99, 47, 0, 3, 1, "CONDITIONING"],
[100, 31, 0, 32, 0, "IMAGE"],
[101, 49, 0, 7, 0, "CLIP"],
[102, 49, 0, 6, 0, "CLIP"],
[103, 49, 0, 27, 0, "CLIP"],
[104, 49, 0, 26, 0, "CLIP"],
[105, 49, 0, 45, 0, "CLIP"],
[106, 48, 0, 3, 0, "MODEL"],
[107, 48, 0, 24, 0, "MODEL"],
[108, 48, 1, 49, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.53,
"offset": [1214.17, 1188.8]
}
},
"version": 0.4,
"models": [
{
"name": "vae-ft-mse-840000-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors?download=true",
"directory": "vae"
}
]
}

Binary image file not shown (deleted, 395 KiB)

Binary image file not shown (deleted, 109 KiB)


@@ -1,385 +0,0 @@
{
"last_node_id": 15,
"last_link_id": 21,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1210, 250],
"size": [210, 46],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 14
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [-42, -147],
"size": [422.85, 164.31],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 21
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo) girl (flat chest:0.9), (fennec ears:1.1)\u00a0 (fox ears:1.1), (blonde hair:1.0), messy hair, sky clouds, standing in a grass field, (chibi), blue eyes"
]
},
{
"id": 12,
"type": "ControlNetLoader",
"pos": [-50, 69],
"size": [422, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["control_v11p_sd15_scribble_fp16.safetensors"]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [355, 213],
"size": [425.28, 180.61],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [16],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis)"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [439, 446],
"size": [315, 106],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 13,
"type": "VAELoader",
"pos": [833, 484],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [14],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["vae-ft-mse-840000-ema-pruned.safetensors"]
},
{
"id": 10,
"type": "ControlNetApply",
"pos": [459, 51],
"size": [317.4, 98],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 10
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 13
},
{
"name": "image",
"type": "IMAGE",
"link": 12
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [18],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [0.9]
},
{
"id": 11,
"type": "LoadImage",
"pos": [-70, 177],
"size": [387.97, 465.51],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [12],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["input_scribble_example.png", "image"]
},
{
"id": 14,
"type": "CheckpointLoaderSimple",
"pos": [-448, 231],
"size": [315, 98],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [19],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [20, 21],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1453, 247],
"size": [393.62, 449.16],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [842, 150],
"size": [315, 262],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 19
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 18
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 16
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1002496614778823,
"randomize",
16,
6,
"uni_pc",
"normal",
1
]
},
{
"id": 15,
"type": "MarkdownNote",
"pos": [-450, 375],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[10, 6, 0, 10, 0, "CONDITIONING"],
[12, 11, 0, 10, 2, "IMAGE"],
[13, 12, 0, 10, 1, "CONTROL_NET"],
[14, 13, 0, 8, 1, "VAE"],
[16, 7, 0, 3, 2, "CONDITIONING"],
[18, 10, 0, 3, 1, "CONDITIONING"],
[19, 14, 0, 3, 0, "MODEL"],
[20, 14, 1, 7, 0, "CLIP"],
[21, 14, 1, 6, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.8,
"offset": [843.77, 555.93]
}
},
"version": 0.4,
"models": [
{
"name": "control_v11p_sd15_scribble_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_scribble_fp16.safetensors?download=true",
"directory": "controlnet"
},
{
"name": "vae-ft-mse-840000-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors?download=true",
"directory": "vae"
}
]
}

Binary file not shown (size before: 35 KiB).

View File

@@ -1,373 +0,0 @@
{
"last_node_id": 9,
"last_link_id": 9,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [
413,
389
],
"size": [
425.27801513671875,
180.6060791015625
],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
6
],
"slot_index": 0
}
],
"properties": {},
"widgets_values": [
"text, watermark"
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [
415,
186
],
"size": [
422.84503173828125,
164.31304931640625
],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
4
],
"slot_index": 0
}
],
"properties": {},
"widgets_values": [
"beautiful scenery nature glass bottle landscape, , purple galaxy bottle,"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [
473,
609
],
"size": [
315,
106
],
"flags": {},
"order": 1,
"mode": 0,
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
2
],
"slot_index": 0
}
],
"properties": {},
"widgets_values": [
512,
512,
1
]
},
{
"id": 3,
"type": "KSampler",
"pos": [
863,
186
],
"size": [
315,
262
],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 1
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
7
],
"slot_index": 0
}
],
"properties": {},
"widgets_values": [
156680208700286,
true,
20,
8,
"euler",
"normal",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [
1209,
188
],
"size": [
210,
46
],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
9
],
"slot_index": 0
}
],
"properties": {}
},
{
"id": 9,
"type": "SaveImage",
"pos": [
1451,
189
],
"size": [
210,
26
],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"properties": {}
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [
26,
474
],
"size": [
315,
98
],
"flags": {},
"order": 0,
"mode": 0,
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
1
],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [
3,
5
],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [
8
],
"slot_index": 2
}
],
"properties": {},
"widgets_values": [
"v1-5-pruned-emaonly-fp16.safetensors"
]
},
{
"id": 10,
"type": "MarkdownNote",
"pos": [30, 630],
"size": [315, 88],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"🛈 Need more details? Check out [docs.comfy.org/get_started](https://docs.comfy.org/get_started/gettingstarted)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[
1,
4,
0,
3,
0,
"MODEL"
],
[
2,
5,
0,
3,
3,
"LATENT"
],
[
3,
4,
1,
6,
0,
"CLIP"
],
[
4,
6,
0,
3,
1,
"CONDITIONING"
],
[
5,
4,
1,
7,
0,
"CLIP"
],
[
6,
7,
0,
3,
2,
"CONDITIONING"
],
[
7,
3,
0,
8,
0,
"LATENT"
],
[
8,
4,
2,
8,
1,
"VAE"
],
[
9,
8,
0,
9,
0,
"IMAGE"
]
],
"groups": [],
"config": {},
"extra": {},
"version": 0.4,
"models": [{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
}]
}

Binary file not shown (size before: 137 KiB).

Binary file not shown (size before: 5.4 KiB).

View File

@@ -1,378 +0,0 @@
{
"last_node_id": 35,
"last_link_id": 52,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1053, 172],
"size": [210, 46],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 51
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 23,
"type": "ControlNetApply",
"pos": [593.6, -388.0],
"size": [317.4, 98],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 42
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 47
},
{
"name": "image",
"type": "IMAGE",
"link": 34
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [1.0]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [259, 463],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [832, 384, 1]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [169, 212],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 49
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2)"
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1310, 169],
"size": [516.05, 301.24],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 33,
"type": "DiffControlNetLoader",
"pos": [131, -338],
"size": [421.93, 58],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 48
}
],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [47],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DiffControlNetLoader"
},
"widgets_values": ["diff_control_sd15_depth_fp16.safetensors"]
},
{
"id": 24,
"type": "CLIPTextEncode",
"pos": [-305, -435],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 50
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["underwater photograph shark\n\n\n\n"]
},
{
"id": 3,
"type": "KSampler",
"pos": [699, 167],
"size": [315, 262],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 52
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
891858402356003,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 20,
"type": "LoadImage",
"pos": [135, -234],
"size": [429.73, 314],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [34],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["shark_depthmap.png", "image"]
},
{
"id": 34,
"type": "CheckpointLoaderSimple",
"pos": [-281, 110],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [48, 52],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [49, 50],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [51],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 35,
"type": "MarkdownNote",
"pos": [-270, 255],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#t2i-adapter-vs-controlnets)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[34, 20, 0, 23, 2, "IMAGE"],
[40, 23, 0, 3, 1, "CONDITIONING"],
[42, 24, 0, 23, 0, "CONDITIONING"],
[47, 33, 0, 23, 1, "CONTROL_NET"],
[48, 34, 0, 33, 0, "MODEL"],
[49, 34, 1, 7, 0, "CLIP"],
[50, 34, 1, 24, 0, "CLIP"],
[51, 34, 2, 8, 1, "VAE"],
[52, 34, 0, 3, 0, "MODEL"]
],
"groups": [
{
"id": 1,
"title": "Apply Depth ControlNet",
"bounding": [210, -480, 739, 336],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.86,
"offset": [671.97, 711.84]
}
},
"version": 0.4,
"models": [
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "diff_control_sd15_depth_fp16.safetensors",
"url": "https://huggingface.co/kohya-ss/ControlNet-diff-modules/resolve/main/diff_control_sd15_depth_fp16.safetensors?download=true",
"directory": "controlnet"
}
]
}

Binary file not shown (size before: 170 KiB).

Binary file not shown (size before: 5.4 KiB).

View File

@@ -1,371 +0,0 @@
{
"last_node_id": 34,
"last_link_id": 49,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1053, 172],
"size": [210, 46],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 49
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [259, 463],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [832, 384, 1]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [169, 212],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 47
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2)"
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1310, 169],
"size": [516.05, 301.24],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [699, 167],
"size": [315, 262],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 46
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
891858402356003,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 23,
"type": "ControlNetApply",
"pos": [553, -289],
"size": [317.4, 98],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 42
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 44
},
{
"name": "image",
"type": "IMAGE",
"link": 34
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [1.0]
},
{
"id": 31,
"type": "ControlNetLoader",
"pos": [168, -286],
"size": [345, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [44],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["t2iadapter_depth_sd14v1.pth"]
},
{
"id": 20,
"type": "LoadImage",
"pos": [88, -174],
"size": [413, 314],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [34],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["shark_depthmap.png", "image"]
},
{
"id": 33,
"type": "CheckpointLoaderSimple",
"pos": [-349, 161],
"size": [315, 98],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [46],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [47, 48],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [49],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 24,
"type": "CLIPTextEncode",
"pos": [-360, -261],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 48
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["underwater photograph shark\n\n\n\n"]
},
{
"id": 34,
"type": "MarkdownNote",
"pos": [-345, 300],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#t2i-adapter-vs-controlnets)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[34, 20, 0, 23, 2, "IMAGE"],
[40, 23, 0, 3, 1, "CONDITIONING"],
[42, 24, 0, 23, 0, "CONDITIONING"],
[44, 31, 0, 23, 1, "CONTROL_NET"],
[46, 33, 0, 3, 0, "MODEL"],
[47, 33, 1, 7, 0, "CLIP"],
[48, 33, 1, 24, 0, "CLIP"],
[49, 33, 2, 8, 1, "VAE"]
],
"groups": [
{
"id": 1,
"title": "Apply Depth T2I-Adapter",
"bounding": [150, -375, 739, 336],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.84,
"offset": [737.68, 680.26]
}
},
"version": 0.4,
"models": [
{
"name": "t2iadapter_depth_sd14v1.pth",
"url": "https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_depth_sd14v1.pth?download=true",
"directory": "controlnet"
},
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
}
]
}

Binary file not shown (size before: 146 KiB).

View File

@@ -1,267 +0,0 @@
{
"last_node_id": 10,
"last_link_id": 9,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"photograph in the style of embedding:SDA768.pt girl with blonde hair\nlandscape scenery view"
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [26, 474],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [1],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v2-1_768-ema-pruned.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [469, 528],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 1
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
193694018275622,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 10,
"type": "MarkdownNote",
"pos": [30, 630],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/textual_inversion_embeddings/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[1, 4, 0, 3, 0, "MODEL"],
[2, 5, 0, 3, 3, "LATENT"],
[3, 4, 1, 6, 0, "CLIP"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[5, 4, 1, 7, 0, "CLIP"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[9, 8, 0, 9, 0, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.84,
"offset": [498.31, 149.5]
}
},
"version": 0.4,
"models": [
{
"name": "v2-1_768-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors?download=true",
"directory": "checkpoints"
}
]
}

Binary file not shown (size before: 324 KiB).

View File

@@ -1,329 +0,0 @@
{
"last_node_id": 15,
"last_link_id": 19,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 18
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 17
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece best quality girl standing in victorian clothing"
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 19
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [15],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1791, 169],
"size": [455.99, 553.09],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 16
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 14,
"type": "ImageUpscaleWithModel",
"pos": [1506, 151],
"size": [241.8, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "upscale_model",
"type": "UPSCALE_MODEL",
"link": 14
},
{
"name": "image",
"type": "IMAGE",
"link": 15
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [16],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageUpscaleWithModel"
},
"widgets_values": []
},
{
"id": 13,
"type": "UpscaleModelLoader",
"pos": [1128, 51],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "UPSCALE_MODEL",
"type": "UPSCALE_MODEL",
"links": [14],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UpscaleModelLoader"
},
"widgets_values": ["RealESRGAN_x4plus.pth"]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 1
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
833543590226030,
"randomize",
20,
8,
"euler",
"normal",
1
]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-11, 307],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [1],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [17, 18],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [19],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 15,
"type": "MarkdownNote",
"pos": [0, 465],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/upscale_models/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[1, 4, 0, 3, 0, "MODEL"],
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[14, 13, 0, 14, 0, "UPSCALE_MODEL"],
[15, 8, 0, 14, 1, "IMAGE"],
[16, 14, 0, 9, 0, "IMAGE"],
[17, 4, 1, 6, 0, "CLIP"],
[18, 4, 1, 7, 0, "CLIP"],
[19, 4, 2, 8, 1, "VAE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.82,
"offset": [400.67, 431.06]
}
},
"version": 0.4,
"models": [
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "RealESRGAN_x4plus.pth",
"url": "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.0/RealESRGAN_x4plus.pth",
"directory": "upscale_models"
}
]
}

Binary file not shown (size before: 142 KiB).

Binary file not shown (size before: 32 KiB).

View File

@@ -1,474 +0,0 @@
{
"last_node_id": 36,
"last_link_id": 70,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [307, 282],
"size": [425.28, 180.61],
"flags": {
"collapsed": true
},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 63
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [68],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 17,
"type": "LoadImage",
"pos": [220, 530],
"size": [315, 314.0],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [49],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sd3_controlnet_example.png", "image"]
},
{
"id": 19,
"type": "PreviewImage",
"pos": [899, 532],
"size": [571.59, 625.53],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 26
}
],
"outputs": [],
"properties": {
"Node name for S&R": "PreviewImage"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [1290, 40],
"size": [315, 262],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 57
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 64
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 65
},
{
"name": "latent_image",
"type": "LATENT",
"link": 66
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
50363905047731,
"randomize",
20,
1,
"euler",
"normal",
1
]
},
{
"id": 35,
"type": "InstructPixToPixConditioning",
"pos": [1040, 50],
"size": [235.2, 86],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 67
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 68
},
{
"name": "vae",
"type": "VAE",
"link": 69
},
{
"name": "pixels",
"type": "IMAGE",
"link": 70
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [64],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [65],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [66],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "InstructPixToPixConditioning"
},
"widgets_values": []
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1620, 40],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 60
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1850, 40],
"size": [828.95, 893.85],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 32,
"type": "VAELoader",
"pos": [1290, 350],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [60, 69],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [700, 50],
"size": [317.4, 58],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [67],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [30]
},
{
"id": 23,
"type": "CLIPTextEncode",
"pos": [260, 50],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"cute anime girl with massive fluffy fennec ears and a big fluffy tail blonde messy long hair blue eyes wearing a pink sweater and jeans"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 34,
"type": "DualCLIPLoader",
"pos": [-80, 110],
"size": [315, 106],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [62, 63]
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp16.safetensors",
"flux",
"default"
]
},
{
"id": 31,
"type": "UNETLoader",
"pos": [710, -80],
"size": [315, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-canny-dev.safetensors", "default"]
},
{
"id": 18,
"type": "Canny",
"pos": [560, 530],
"size": [315, 82],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 49
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [26, 70],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "Canny"
},
"widgets_values": [0.15, 0.3]
},
{
"id": 36,
"type": "MarkdownNote",
"pos": [-75, 270],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#canny-and-depth)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[26, 18, 0, 19, 0, "IMAGE"],
[41, 23, 0, 26, 0, "CONDITIONING"],
[49, 17, 0, 18, 0, "IMAGE"],
[57, 31, 0, 3, 0, "MODEL"],
[60, 32, 0, 8, 1, "VAE"],
[62, 34, 0, 23, 0, "CLIP"],
[63, 34, 0, 7, 0, "CLIP"],
[64, 35, 0, 3, 1, "CONDITIONING"],
[65, 35, 1, 3, 2, "CONDITIONING"],
[66, 35, 2, 3, 3, "LATENT"],
[67, 26, 0, 35, 0, "CONDITIONING"],
[68, 7, 0, 35, 1, "CONDITIONING"],
[69, 32, 0, 35, 2, "VAE"],
[70, 18, 0, 35, 3, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.67,
"offset": [553.16, 455.34]
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
},
{
"name": "flux1-canny-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Canny-dev/resolve/main/flux1-canny-dev.safetensors?download=true",
"directory": "diffusion_models"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
}
]
}

Binary file not shown (size before: 120 KiB).

Binary file not shown (size before: 5.9 KiB).

View File

@@ -1,454 +0,0 @@
{
"last_node_id": 40,
"last_link_id": 76,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1620, 98],
"size": [210, 46],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 60
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [307, 282],
"size": [425.28, 180.61],
"flags": {
"collapsed": true
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 63
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [68],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 34,
"type": "DualCLIPLoader",
"pos": [-238, 112],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [62, 63]
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp16.safetensors",
"flux",
"default"
]
},
{
"id": 17,
"type": "LoadImage",
"pos": [307, 342],
"size": [315, 314.0],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [71],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["shark_depthmap.png", "image"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [621, 8],
"size": [317.4, 58],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [67],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [10]
},
{
"id": 35,
"type": "InstructPixToPixConditioning",
"pos": [1018, 124],
"size": [235.2, 86],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 67
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 68
},
{
"name": "vae",
"type": "VAE",
"link": 69
},
{
"name": "pixels",
"type": "IMAGE",
"link": 71
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [64],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [65],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [73],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "InstructPixToPixConditioning"
},
"widgets_values": []
},
{
"id": 32,
"type": "VAELoader",
"pos": [656, 165],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [60, 69],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1865, 98],
"size": [722.41, 425.77],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 37,
"type": "LoraLoaderModelOnly",
"pos": [624, -172],
"size": [315, 82],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 74
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [76],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LoraLoaderModelOnly"
},
"widgets_values": ["flux1-depth-dev-lora.safetensors", 1]
},
{
"id": 23,
"type": "CLIPTextEncode",
"pos": [115, -17],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["a photograph of a shark in the sea"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 3,
"type": "KSampler",
"pos": [1280, 100],
"size": [315, 262],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 76
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 64
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 65
},
{
"name": "latent_image",
"type": "LATENT",
"link": 73
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
91050358797301,
"randomize",
20,
1,
"euler",
"normal",
1
]
},
{
"id": 31,
"type": "UNETLoader",
"pos": [249, -171],
"size": [315, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [74],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-depth-dev.safetensors", "default"]
},
{
"id": 40,
"type": "MarkdownNote",
"pos": [-225, 270],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#canny-and-depth)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[41, 23, 0, 26, 0, "CONDITIONING"],
[60, 32, 0, 8, 1, "VAE"],
[62, 34, 0, 23, 0, "CLIP"],
[63, 34, 0, 7, 0, "CLIP"],
[64, 35, 0, 3, 1, "CONDITIONING"],
[65, 35, 1, 3, 2, "CONDITIONING"],
[67, 26, 0, 35, 0, "CONDITIONING"],
[68, 7, 0, 35, 1, "CONDITIONING"],
[69, 32, 0, 35, 2, "VAE"],
[71, 17, 0, 35, 3, "IMAGE"],
[73, 35, 2, 3, 3, "LATENT"],
[74, 31, 0, 37, 0, "MODEL"],
[76, 37, 0, 3, 0, "MODEL"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.65,
"offset": [724.57, 776.23]
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
},
{
"name": "flux1-depth-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Depth-dev/resolve/main/flux1-depth-dev.safetensors?download=true",
"directory": "diffusion_models"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "flux1-depth-dev-lora.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Depth-dev-lora/resolve/main/flux1-depth-dev-lora.safetensors?download=true",
"directory": "loras"
}
]
}

Binary file not shown (size before: 183 KiB).

View File

@@ -1,332 +0,0 @@
{
"last_node_id": 37,
"last_link_id": 57,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 192],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 45
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [56],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"cute anime girl with massive fluffy fennec ears and a big fluffy tail blonde messy long hair blue eyes wearing a maid outfit with a long black gold leaf pattern dress and a white apron mouth open placing a fancy black forest cake with candles on top of a dinner table of an old dark Victorian mansion lit by candlelight with a bright window to the foggy forest and very expensive stuff everywhere there are paintings on the walls"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1151, 195],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 52
},
{
"name": "vae",
"type": "VAE",
"link": 46
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1375, 194],
"size": [985.3, 1060.38],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [471, 455],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 30,
"type": "CheckpointLoaderSimple",
"pos": [48, 192],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [47],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [45, 54],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [46],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["flux1-dev-fp8.safetensors"]
},
{
"id": 31,
"type": "KSampler",
"pos": [816, 192],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 47
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 57
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 55
},
{
"name": "latent_image",
"type": "LATENT",
"link": 51
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [52],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
972054013131368,
"randomize",
20,
1,
"euler",
"simple",
1
]
},
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [390, 400],
"size": [422.85, 164.31],
"flags": {
"collapsed": true
},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 54,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [55],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 35,
"type": "FluxGuidance",
"pos": [576, 96],
"size": [211.6, 58],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 56
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [3.5]
},
{
"id": 37,
"type": "MarkdownNote",
"pos": [60, 345],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#flux-dev-1)"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 34,
"type": "Note",
"pos": [825, 510],
"size": [282.86, 164.08],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"Note that Flux dev and schnell do not have any negative prompt so CFG should be set to 1.0. Setting CFG to 1.0 means the negative prompt is ignored."
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[9, 8, 0, 9, 0, "IMAGE"],
[45, 30, 1, 6, 0, "CLIP"],
[46, 30, 2, 8, 1, "VAE"],
[47, 30, 0, 31, 0, "MODEL"],
[51, 27, 0, 31, 3, "LATENT"],
[52, 31, 0, 8, 0, "LATENT"],
[54, 30, 1, 33, 0, "CLIP"],
[55, 33, 0, 31, 2, "CONDITIONING"],
[56, 6, 0, 35, 0, "CONDITIONING"],
[57, 35, 0, 31, 1, "CONDITIONING"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.8,
"offset": [350.72, 161.55]
}
},
"version": 0.4,
"models": [
{
"name": "flux1-dev-fp8.safetensors",
"url": "https://huggingface.co/Comfy-Org/flux1-dev/resolve/main/flux1-dev-fp8.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,771 +0,0 @@
{
"last_node_id": 38,
"last_link_id": 116,
"nodes": [
{
"id": 11,
"type": "DualCLIPLoader",
"pos": [48, 288],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"t5xxl_fp16.safetensors",
"clip_l.safetensors",
"flux",
"default"
]
},
{
"id": 17,
"type": "BasicScheduler",
"pos": [480, 1008],
"size": [315, 106],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 55,
"slot_index": 0
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"shape": 3,
"links": [20]
}
],
"properties": {
"Node name for S&R": "BasicScheduler"
},
"widgets_values": ["simple", 20, 1]
},
{
"id": 16,
"type": "KSamplerSelect",
"pos": [480, 912],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"shape": 3,
"links": [19]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [480, 144],
"size": [317.4, 58],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [3.5],
"color": "#233",
"bgcolor": "#355"
},
{
"id": 22,
"type": "BasicGuider",
"pos": [576, 48],
"size": [222.35, 46],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 54,
"slot_index": 0
},
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 42,
"slot_index": 1
}
],
"outputs": [
{
"name": "GUIDER",
"type": "GUIDER",
"shape": 3,
"links": [30],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "BasicGuider"
},
"widgets_values": []
},
{
"id": 13,
"type": "SamplerCustomAdvanced",
"pos": [864, 192],
"size": [272.36, 124.54],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "noise",
"type": "NOISE",
"link": 37,
"slot_index": 0
},
{
"name": "guider",
"type": "GUIDER",
"link": 30,
"slot_index": 1
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 19,
"slot_index": 2
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 20,
"slot_index": 3
},
{
"name": "latent_image",
"type": "LATENT",
"link": 116,
"slot_index": 4
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"shape": 3,
"links": [24],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustomAdvanced"
},
"widgets_values": []
},
{
"id": 25,
"type": "RandomNoise",
"pos": [480, 768],
"size": [315, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "NOISE",
"type": "NOISE",
"shape": 3,
"links": [37]
}
],
"properties": {
"Node name for S&R": "RandomNoise"
},
"widgets_values": [219670278747233, "randomize"],
"color": "#2a363b",
"bgcolor": "#3f5159"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [866, 367],
"size": [210, 46],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 24
},
{
"name": "vae",
"type": "VAE",
"link": 12
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 240],
"size": [422.85, 164.31],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 10
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"cute anime girl with massive fluffy fennec ears and a big fluffy tail blonde messy long hair blue eyes wearing a maid outfit with a long black gold leaf pattern dress and a white apron mouth open holding a fancy black forest cake with candles on top in the kitchen of an old dark Victorian mansion lit by candlelight with a bright window to the foggy forest and very expensive stuff everywhere"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 30,
"type": "ModelSamplingFlux",
"pos": [480, 1152],
"size": [315, 130],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 56,
"slot_index": 0
},
{
"name": "width",
"type": "INT",
"widget": {
"name": "width"
},
"link": 115,
"slot_index": 1
},
{
"name": "height",
"type": "INT",
"widget": {
"name": "height"
},
"link": 114,
"slot_index": 2
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [54, 55],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ModelSamplingFlux"
},
"widgets_values": [1.15, 0.5, 1024, 1024]
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [480, 624],
"size": [315, 106],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "width",
"type": "INT",
"widget": {
"name": "width"
},
"link": 112
},
{
"name": "height",
"type": "INT",
"widget": {
"name": "height"
},
"link": 113
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [116],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 34,
"type": "PrimitiveNode",
"pos": [432, 480],
"size": [210, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "width"
},
"links": [112, 115],
"slot_index": 0
}
],
"title": "width",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [1024, "fixed"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 35,
"type": "PrimitiveNode",
"pos": [672, 480],
"size": [210, 82],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "height"
},
"links": [113, 114],
"slot_index": 0
}
],
"title": "height",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [1024, "fixed"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 12,
"type": "UNETLoader",
"pos": [48, 144],
"size": [315, 82],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [56],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-dev.safetensors", "default"],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 9,
"type": "SaveImage",
"pos": [1155, 196],
"size": [985.3, 1060.38],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 37,
"type": "Note",
"pos": [480, 1344],
"size": [315.0, 117.98],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"The reference sampling implementation auto adjusts the shift value based on the resolution, if you don't want this you can just bypass (CTRL-B) this ModelSamplingFlux node.\n"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 10,
"type": "VAELoader",
"pos": [48, 432],
"size": [311.82, 60.43],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 28,
"type": "Note",
"pos": [48, 576],
"size": [336, 288],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"If you get an error in any of the nodes above make sure the files are in the correct directories.\n\nSee the top of the examples page for the links : https://comfyanonymous.github.io/ComfyUI_examples/flux/\n\nflux1-dev.safetensors goes in: ComfyUI/models/unet/\n\nt5xxl_fp16.safetensors and clip_l.safetensors go in: ComfyUI/models/clip/\n\nae.safetensors goes in: ComfyUI/models/vae/\n\n\nTip: You can set the weight_dtype above to one of the fp8 types if you have memory issues."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 38,
"type": "MarkdownNote",
"pos": [45, 930],
"size": [225, 60],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#flux-dev-1)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[9, 8, 0, 9, 0, "IMAGE"],
[10, 11, 0, 6, 0, "CLIP"],
[12, 10, 0, 8, 1, "VAE"],
[19, 16, 0, 13, 2, "SAMPLER"],
[20, 17, 0, 13, 3, "SIGMAS"],
[24, 13, 0, 8, 0, "LATENT"],
[30, 22, 0, 13, 1, "GUIDER"],
[37, 25, 0, 13, 0, "NOISE"],
[41, 6, 0, 26, 0, "CONDITIONING"],
[42, 26, 0, 22, 1, "CONDITIONING"],
[54, 30, 0, 22, 0, "MODEL"],
[55, 30, 0, 17, 0, "MODEL"],
[56, 12, 0, 30, 0, "MODEL"],
[112, 34, 0, 27, 0, "INT"],
[113, 35, 0, 27, 1, "INT"],
[114, 35, 0, 30, 2, "INT"],
[115, 34, 0, 30, 1, "INT"],
[116, 27, 0, 13, 4, "LATENT"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [-0.18, 2.29]
},
"groupNodes": {
"EmptyLatentImage": {
"nodes": [
{
"type": "PrimitiveNode",
"pos": [432, 480],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 6,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [],
"widget": {
"name": "height"
},
"slot_index": 0
}
],
"title": "height",
"properties": {
"Run widget replace on values": false
},
"color": "#323",
"bgcolor": "#535",
"index": 0
},
{
"type": "PrimitiveNode",
"pos": [672, 480],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 7,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [],
"slot_index": 0,
"widget": {
"name": "width"
}
}
],
"title": "width",
"properties": {
"Run widget replace on values": false
},
"color": "#323",
"bgcolor": "#535",
"index": 1
},
{
"type": "EmptySD3LatentImage",
"pos": [480, 624],
"size": {
"0": 315,
"1": 106
},
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "width",
"type": "INT",
"link": null,
"widget": {
"name": "width"
}
},
{
"name": "height",
"type": "INT",
"link": null,
"widget": {
"name": "height"
}
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1],
"index": 2
}
],
"links": [
[1, 0, 2, 0, 34, "INT"],
[0, 0, 2, 1, 35, "INT"]
],
"external": [
[0, 0, "INT"],
[1, 0, "INT"],
[2, 0, "LATENT"]
],
"config": {
"0": {
"output": {
"0": {
"name": "height"
}
},
"input": {
"value": {
"visible": true
}
}
},
"1": {
"output": {
"0": {
"name": "width"
}
},
"input": {
"value": {
"visible": true
}
}
},
"2": {
"input": {
"width": {
"visible": false
},
"height": {
"visible": false
}
}
}
}
}
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
},
{
"name": "flux1-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/flux1-dev.safetensors?download=true",
"directory": "diffusion_models"
}
]
}
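
The `models` array that closes each of these workflow JSONs maps a model filename to a download URL and a target directory. As a minimal illustrative sketch (the local file path and the output format are assumptions, not something defined by these files), the entries can be flattened into a download list:

```typescript
// Sketch: read a workflow JSON and list the model downloads it declares.
// Assumes the structure shown above: "models": [{ name, url, directory }, ...].
import { readFileSync } from 'node:fs'

interface WorkflowModel {
  name: string
  url: string
  directory: string
}

function listModelDownloads(workflowPath: string): string[] {
  const workflow = JSON.parse(readFileSync(workflowPath, 'utf-8')) as {
    models?: WorkflowModel[]
  }
  return (workflow.models ?? []).map(
    (m) => `${m.directory}/${m.name} <- ${m.url}`
  )
}

// Hypothetical usage:
// listModelDownloads('flux_dev_example.json').forEach((line) => console.log(line))
```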

Binary file not shown (Before: 160 KiB)
Binary file not shown (Before: 50 KiB)

@@ -1,458 +0,0 @@
{
"last_node_id": 45,
"last_link_id": 100,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [307, 282],
"size": [425.28, 180.61],
"flags": {
"collapsed": true
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 63
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [81],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 32,
"type": "VAELoader",
"pos": [1352, 422],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [60, 82],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [593, 44],
"size": [317.4, 58],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [80],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [30]
},
{
"id": 34,
"type": "DualCLIPLoader",
"pos": [-237, 79],
"size": [315, 106],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [62, 63]
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp16.safetensors",
"flux",
"default"
]
},
{
"id": 39,
"type": "DifferentialDiffusion",
"pos": [1001, -68],
"size": [277.2, 26],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 85
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [86],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DifferentialDiffusion"
},
"widgets_values": []
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1620, 98],
"size": [210, 46],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 60
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [95],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 38,
"type": "InpaintModelConditioning",
"pos": [952, 78],
"size": [302.4, 138],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 80
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 81
},
{
"name": "vae",
"type": "VAE",
"link": 82
},
{
"name": "pixels",
"type": "IMAGE",
"link": 99
},
{
"name": "mask",
"type": "MASK",
"link": 100
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [77],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [78],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [88],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "InpaintModelConditioning"
},
"widgets_values": [false]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1877, 101],
"size": [828.95, 893.85],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 95
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [1280, 100],
"size": [315, 262],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 86
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 77
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 78
},
{
"name": "latent_image",
"type": "LATENT",
"link": 88
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
656821733471329,
"randomize",
20,
1,
"euler",
"normal",
1
]
},
{
"id": 31,
"type": "UNETLoader",
"pos": [602, -120],
"size": [315, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [85],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-fill-dev.safetensors", "default"]
},
{
"id": 17,
"type": "LoadImage",
"pos": [587, 312],
"size": [315, 314.0],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [99],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": [100],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["yosemite_inpaint_example.png", "image"]
},
{
"id": 23,
"type": "CLIPTextEncode",
"pos": [144, -7],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"anime girl with massive fennec ears blonde hair blue eyes wearing a pink shirt"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 45,
"type": "MarkdownNote",
"pos": [-225, 255],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#fill-inpainting-model)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[41, 23, 0, 26, 0, "CONDITIONING"],
[60, 32, 0, 8, 1, "VAE"],
[62, 34, 0, 23, 0, "CLIP"],
[63, 34, 0, 7, 0, "CLIP"],
[77, 38, 0, 3, 1, "CONDITIONING"],
[78, 38, 1, 3, 2, "CONDITIONING"],
[80, 26, 0, 38, 0, "CONDITIONING"],
[81, 7, 0, 38, 1, "CONDITIONING"],
[82, 32, 0, 38, 2, "VAE"],
[85, 31, 0, 39, 0, "MODEL"],
[86, 39, 0, 3, 0, "MODEL"],
[88, 38, 2, 3, 3, "LATENT"],
[95, 8, 0, 9, 0, "IMAGE"],
[99, 17, 0, 38, 3, "IMAGE"],
[100, 17, 1, 38, 4, "MASK"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.21,
"offset": [566.62, 207.73]
}
},
"version": 0.4,
"models": [
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "flux1-fill-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Fill-dev/blob/main/flux1-fill-dev.safetensors",
"directory": "diffusion_models"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
}
]
}
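
The flat `links` arrays in these workflows are positional tuples; judging from the entries above (for example [7, 3, 0, 8, 0, "LATENT"]), each one reads as [link_id, origin_node_id, origin_slot, target_node_id, target_slot, type]. The sketch below turns them into named records; the field names are labels chosen here for illustration, not taken from a published schema:

```typescript
// Sketch: decode the positional link tuples found in these workflow JSONs.
// Field order inferred from the entries above, e.g. [7, 3, 0, 8, 0, "LATENT"].
type RawLink = [number, number, number, number, number, string]

interface Link {
  id: number
  originNodeId: number
  originSlot: number
  targetNodeId: number
  targetSlot: number
  dataType: string
}

const decodeLinks = (raw: RawLink[]): Link[] =>
  raw.map(([id, originNodeId, originSlot, targetNodeId, targetSlot, dataType]) => ({
    id,
    originNodeId,
    originSlot,
    targetNodeId,
    targetSlot,
    dataType
  }))

// Example from the inpaint workflow above:
// decodeLinks([[7, 3, 0, 8, 0, 'LATENT']])
```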

Binary file not shown (Before: 402 KiB)
Binary file not shown (Before: 100 KiB)

@@ -1,491 +0,0 @@
{
"last_node_id": 45,
"last_link_id": 98,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [307, 282],
"size": [425.28, 180.61],
"flags": {
"collapsed": true
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 63
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [81],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 32,
"type": "VAELoader",
"pos": [1352, 422],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [60, 82],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [593, 44],
"size": [317.4, 58],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [80],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [30]
},
{
"id": 34,
"type": "DualCLIPLoader",
"pos": [-237, 79],
"size": [315, 106],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [62, 63]
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp16.safetensors",
"flux",
"default"
]
},
{
"id": 39,
"type": "DifferentialDiffusion",
"pos": [1001, -68],
"size": [277.2, 26],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 85
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [86],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DifferentialDiffusion"
},
"widgets_values": []
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1620, 98],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 60
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [95],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 38,
"type": "InpaintModelConditioning",
"pos": [952, 78],
"size": [302.4, 138],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 80
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 81
},
{
"name": "vae",
"type": "VAE",
"link": 82
},
{
"name": "pixels",
"type": "IMAGE",
"link": 97
},
{
"name": "mask",
"type": "MASK",
"link": 98
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [77],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [78],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [88],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "InpaintModelConditioning"
},
"widgets_values": [false]
},
{
"id": 44,
"type": "ImagePadForOutpaint",
"pos": [415, 359],
"size": [315, 174],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 96
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [97],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": [98],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "ImagePadForOutpaint"
},
"widgets_values": [400, 0, 400, 400, 24]
},
{
"id": 23,
"type": "CLIPTextEncode",
"pos": [144, -7],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["beautiful scenery"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 9,
"type": "SaveImage",
"pos": [1877, 101],
"size": [828.95, 893.85],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 95
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [1280, 100],
"size": [315, 262],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 86
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 77
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 78
},
{
"name": "latent_image",
"type": "LATENT",
"link": 88
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
164211176398261,
"randomize",
20,
1,
"euler",
"normal",
1
]
},
{
"id": 17,
"type": "LoadImage",
"pos": [23, 376],
"size": [315, 314.0],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [96],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": [],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sd3_controlnet_example.png", "image"]
},
{
"id": 31,
"type": "UNETLoader",
"pos": [602, -120],
"size": [315, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [85],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-fill-dev.safetensors", "default"]
},
{
"id": 45,
"type": "MarkdownNote",
"pos": [-225, 255],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#fill-inpainting-model)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[41, 23, 0, 26, 0, "CONDITIONING"],
[60, 32, 0, 8, 1, "VAE"],
[62, 34, 0, 23, 0, "CLIP"],
[63, 34, 0, 7, 0, "CLIP"],
[77, 38, 0, 3, 1, "CONDITIONING"],
[78, 38, 1, 3, 2, "CONDITIONING"],
[80, 26, 0, 38, 0, "CONDITIONING"],
[81, 7, 0, 38, 1, "CONDITIONING"],
[82, 32, 0, 38, 2, "VAE"],
[85, 31, 0, 39, 0, "MODEL"],
[86, 39, 0, 3, 0, "MODEL"],
[88, 38, 2, 3, 3, "LATENT"],
[95, 8, 0, 9, 0, "IMAGE"],
[96, 17, 0, 44, 0, "IMAGE"],
[97, 44, 0, 38, 3, "IMAGE"],
[98, 44, 1, 38, 4, "MASK"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1,
"offset": [240.64, 211.87]
}
},
"version": 0.4,
"models": [
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "flux1-fill-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Fill-dev/blob/main/flux1-fill-dev.safetensors",
"directory": "diffusion_models"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
}
]
}

Binary file not shown (Before: 236 KiB)

@@ -1,951 +0,0 @@
{
"last_node_id": 44,
"last_link_id": 123,
"nodes": [
{
"id": 11,
"type": "DualCLIPLoader",
"pos": [48, 288],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"t5xxl_fp16.safetensors",
"clip_l.safetensors",
"flux",
"default"
]
},
{
"id": 17,
"type": "BasicScheduler",
"pos": [480, 1008],
"size": [315, 106],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 55,
"slot_index": 0
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"shape": 3,
"links": [20]
}
],
"properties": {
"Node name for S&R": "BasicScheduler"
},
"widgets_values": ["simple", 20, 1]
},
{
"id": 16,
"type": "KSamplerSelect",
"pos": [480, 912],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"shape": 3,
"links": [19]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [480, 144],
"size": [317.4, 58],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [122],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [3.5],
"color": "#233",
"bgcolor": "#355"
},
{
"id": 13,
"type": "SamplerCustomAdvanced",
"pos": [864, 192],
"size": [272.36, 124.54],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "noise",
"type": "NOISE",
"link": 37,
"slot_index": 0
},
{
"name": "guider",
"type": "GUIDER",
"link": 30,
"slot_index": 1
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 19,
"slot_index": 2
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 20,
"slot_index": 3
},
{
"name": "latent_image",
"type": "LATENT",
"link": 116,
"slot_index": 4
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"shape": 3,
"links": [24],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustomAdvanced"
},
"widgets_values": []
},
{
"id": 25,
"type": "RandomNoise",
"pos": [480, 768],
"size": [315, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "NOISE",
"type": "NOISE",
"shape": 3,
"links": [37]
}
],
"properties": {
"Node name for S&R": "RandomNoise"
},
"widgets_values": [958831004022715, "randomize"],
"color": "#2a363b",
"bgcolor": "#3f5159"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [866, 367],
"size": [210, 46],
"flags": {},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 24
},
{
"name": "vae",
"type": "VAE",
"link": 12
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 30,
"type": "ModelSamplingFlux",
"pos": [480, 1152],
"size": [315, 130],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 56,
"slot_index": 0
},
{
"name": "width",
"type": "INT",
"widget": {
"name": "width"
},
"link": 115,
"slot_index": 1
},
{
"name": "height",
"type": "INT",
"widget": {
"name": "height"
},
"link": 114,
"slot_index": 2
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [54, 55],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ModelSamplingFlux"
},
"widgets_values": [1.15, 0.5, 1024, 1024]
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [480, 624],
"size": [315, 106],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "width",
"type": "INT",
"widget": {
"name": "width"
},
"link": 112
},
{
"name": "height",
"type": "INT",
"widget": {
"name": "height"
},
"link": 113
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [116],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 34,
"type": "PrimitiveNode",
"pos": [432, 480],
"size": [210, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "width"
},
"links": [112, 115],
"slot_index": 0
}
],
"title": "width",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [1024, "fixed"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 35,
"type": "PrimitiveNode",
"pos": [672, 480],
"size": [210, 82],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "height"
},
"links": [113, 114],
"slot_index": 0
}
],
"title": "height",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [1024, "fixed"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 12,
"type": "UNETLoader",
"pos": [48, 144],
"size": [315, 82],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [56],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-dev.safetensors", "default"],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 9,
"type": "SaveImage",
"pos": [1155, 196],
"size": [985.3, 1060.38],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 37,
"type": "Note",
"pos": [480, 1344],
"size": [315.0, 117.98],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"The reference sampling implementation auto-adjusts the shift value based on the resolution. If you don't want this, you can just bypass (CTRL-B) this ModelSamplingFlux node.\n"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 10,
"type": "VAELoader",
"pos": [48, 432],
"size": [311.82, 60.43],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 28,
"type": "Note",
"pos": [48, 576],
"size": [336, 288],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"If you get an error in any of the nodes above make sure the files are in the correct directories.\n\nSee the top of the examples page for the links : https://comfyanonymous.github.io/ComfyUI_examples/flux/\n\nflux1-dev.safetensors goes in: ComfyUI/models/diffusion_models/\n\nt5xxl_fp16.safetensors and clip_l.safetensors go in: ComfyUI/models/text_encoders/\n\nae.safetensors goes in: ComfyUI/models/vae/\n\n\nTip: You can set the weight_dtype above to one of the fp8 types if you have memory issues."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 39,
"type": "CLIPVisionEncode",
"pos": [420, -300],
"size": [290, 78],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 117
},
{
"name": "image",
"type": "IMAGE",
"link": 118
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [120],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 40,
"type": "LoadImage",
"pos": [60, -300],
"size": [315, 314],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [118]
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sd3_controlnet_example.png", "image"]
},
{
"id": 42,
"type": "StyleModelLoader",
"pos": [400, -180],
"size": [340, 60],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "STYLE_MODEL",
"type": "STYLE_MODEL",
"links": [119]
}
],
"properties": {
"Node name for S&R": "StyleModelLoader"
},
"widgets_values": ["flux1-redux-dev.safetensors"]
},
{
"id": 38,
"type": "CLIPVisionLoader",
"pos": [60, -410],
"size": [370, 60],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"links": [117],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionLoader"
},
"widgets_values": ["sigclip_vision_patch14_384.safetensors"]
},
{
"id": 41,
"type": "StyleModelApply",
"pos": [760, -300],
"size": [320, 122],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 122
},
{
"name": "style_model",
"type": "STYLE_MODEL",
"link": 119
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"shape": 7,
"link": 120
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [123],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "StyleModelApply"
},
"widgets_values": [1, "multiply"]
},
{
"id": 22,
"type": "BasicGuider",
"pos": [960, 66],
"size": [222.35, 46],
"flags": {},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 54,
"slot_index": 0
},
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 123,
"slot_index": 1
}
],
"outputs": [
{
"name": "GUIDER",
"type": "GUIDER",
"shape": 3,
"links": [30],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "BasicGuider"
},
"widgets_values": []
},
{
"id": 43,
"type": "Note",
"pos": [1130, -440],
"size": [345.9, 182.31],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"The redux model lets you prompt with images. It can be used with any Flux1 dev or schnell model workflow.\n\nYou can chain multiple \"Apply Style Model\" nodes if you want to mix multiple images together."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 240],
"size": [422.85, 164.31],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 10
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["cute anime girl with massive fluffy fennec ears"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 44,
"type": "MarkdownNote",
"pos": [60, 915],
"size": [225, 60],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#redux)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[9, 8, 0, 9, 0, "IMAGE"],
[10, 11, 0, 6, 0, "CLIP"],
[12, 10, 0, 8, 1, "VAE"],
[19, 16, 0, 13, 2, "SAMPLER"],
[20, 17, 0, 13, 3, "SIGMAS"],
[24, 13, 0, 8, 0, "LATENT"],
[30, 22, 0, 13, 1, "GUIDER"],
[37, 25, 0, 13, 0, "NOISE"],
[41, 6, 0, 26, 0, "CONDITIONING"],
[54, 30, 0, 22, 0, "MODEL"],
[55, 30, 0, 17, 0, "MODEL"],
[56, 12, 0, 30, 0, "MODEL"],
[112, 34, 0, 27, 0, "INT"],
[113, 35, 0, 27, 1, "INT"],
[114, 35, 0, 30, 2, "INT"],
[115, 34, 0, 30, 1, "INT"],
[116, 27, 0, 13, 4, "LATENT"],
[117, 38, 0, 39, 0, "CLIP_VISION"],
[118, 40, 0, 39, 1, "IMAGE"],
[119, 42, 0, 41, 1, "STYLE_MODEL"],
[120, 39, 0, 41, 2, "CLIP_VISION_OUTPUT"],
[122, 26, 0, 41, 0, "CONDITIONING"],
[123, 41, 0, 22, 1, "CONDITIONING"]
],
"groups": [
{
"id": 1,
"title": "Redux Model",
"bounding": [45, -480, 1040, 507.6],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.9,
"offset": [139.8, 57.78]
},
"groupNodes": {
"EmptyLatentImage": {
"nodes": [
{
"type": "PrimitiveNode",
"pos": [432, 480],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 6,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [],
"widget": {
"name": "height"
},
"slot_index": 0
}
],
"title": "height",
"properties": {
"Run widget replace on values": false
},
"color": "#323",
"bgcolor": "#535",
"index": 0
},
{
"type": "PrimitiveNode",
"pos": [672, 480],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 7,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [],
"slot_index": 0,
"widget": {
"name": "width"
}
}
],
"title": "width",
"properties": {
"Run widget replace on values": false
},
"color": "#323",
"bgcolor": "#535",
"index": 1
},
{
"type": "EmptySD3LatentImage",
"pos": [480, 624],
"size": {
"0": 315,
"1": 106
},
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "width",
"type": "INT",
"link": null,
"widget": {
"name": "width"
}
},
{
"name": "height",
"type": "INT",
"link": null,
"widget": {
"name": "height"
}
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1],
"index": 2
}
],
"links": [
[1, 0, 2, 0, 34, "INT"],
[0, 0, 2, 1, 35, "INT"]
],
"external": [
[0, 0, "INT"],
[1, 0, "INT"],
[2, 0, "LATENT"]
],
"config": {
"0": {
"output": {
"0": {
"name": "height"
}
},
"input": {
"value": {
"visible": true
}
}
},
"1": {
"output": {
"0": {
"name": "width"
}
},
"input": {
"value": {
"visible": true
}
}
},
"2": {
"input": {
"width": {
"visible": false
},
"height": {
"visible": false
}
}
}
}
}
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "flux1-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/flux1-dev.safetensors?download=true",
"directory": "diffusion_models"
},
{
"name": "sigclip_vision_patch14_384.safetensors",
"url": "https://huggingface.co/Comfy-Org/sigclip_vision_384/resolve/main/sigclip_vision_patch14_384.safetensors?download=true",
"directory": "clip_vision"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
},
{
"name": "flux1-redux-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Redux-dev/resolve/main/flux1-redux-dev.safetensors?download=true",
"directory": "style_models"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
}
]
}
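
Because every workflow above pairs a `nodes` array with a `links` array that references node ids, a basic consistency check is possible. This is an illustrative sketch, not code from this repository, and it relies only on the fields visible in the JSON above:

```typescript
// Sketch: find links whose origin or target node id does not exist in the graph.
interface WorkflowNode {
  id: number
  type: string
}

interface WorkflowGraph {
  nodes: WorkflowNode[]
  links: [number, number, number, number, number, string][]
}

function findDanglingLinks(graph: WorkflowGraph): number[] {
  const nodeIds = new Set(graph.nodes.map((n) => n.id))
  return graph.links
    .filter(([, origin, , target]) => !nodeIds.has(origin) || !nodeIds.has(target))
    .map(([id]) => id)
}

// An empty result means every link endpoint points at a node that exists.
```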

Binary file not shown (Before: 201 KiB)

@@ -1,302 +0,0 @@
{
"last_node_id": 37,
"last_link_id": 58,
"nodes": [
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [390, 400],
"size": [422.85, 164.31],
"flags": {
"collapsed": true
},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 54,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [55],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [471, 455],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1151, 195],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 52
},
{
"name": "vae",
"type": "VAE",
"link": 46
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1375, 194],
"size": [985.3, 1060.38],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 31,
"type": "KSampler",
"pos": [816, 192],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 47
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 58
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 55
},
{
"name": "latent_image",
"type": "LATENT",
"link": 51
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [52],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
173805153958730,
"randomize",
4,
1,
"euler",
"simple",
1
]
},
{
"id": 30,
"type": "CheckpointLoaderSimple",
"pos": [48, 192],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [47],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [45, 54],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [46],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["flux1-schnell-fp8.safetensors"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 192],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 45
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [58],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"a bottle with a beautiful rainbow galaxy inside it on top of a wooden table in the middle of a modern kitchen beside a plate of vegetables and mushrooms and a wine glass that contains a planet earth with a plate with a half eaten apple pie on it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 34,
"type": "Note",
"pos": [831, 501],
"size": [282.86, 164.08],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"Note that Flux dev and schnell do not use a negative prompt, so CFG should be set to 1.0. Setting CFG to 1.0 means the negative prompt is ignored.\n\nThe schnell model is a distilled model that can generate a good image with only 4 steps."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 37,
"type": "MarkdownNote",
"pos": [45, 345],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#flux-schnell-1)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[9, 8, 0, 9, 0, "IMAGE"],
[45, 30, 1, 6, 0, "CLIP"],
[46, 30, 2, 8, 1, "VAE"],
[47, 30, 0, 31, 0, "MODEL"],
[51, 27, 0, 31, 3, "LATENT"],
[52, 31, 0, 8, 0, "LATENT"],
[54, 30, 1, 33, 0, "CLIP"],
[55, 33, 0, 31, 2, "CONDITIONING"],
[58, 6, 0, 31, 1, "CONDITIONING"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [0.68, 1.83]
}
},
"version": 0.4,
"models": [
{
"name": "flux1-schnell-fp8.safetensors",
"url": "https://huggingface.co/Comfy-Org/flux1-schnell/resolve/main/flux1-schnell-fp8.safetensors?download=true",
"directory": "checkpoints"
}
]
}

Binary file not shown (Before: 103 KiB)

@@ -1,376 +0,0 @@
{
"last_node_id": 28,
"last_link_id": 79,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 1
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 77
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 57
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1023216319780679,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 24,
"type": "CLIPTextEncode",
"pos": [-260, -340],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [69],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"photograph scenery landscape, snow beautiful scenery mountain, glass bottle; purple galaxy bottle; sun"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [300, 230],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1495, 167],
"size": [493.63, 561.54],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["gligen/testing"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [410, 460],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 27,
"type": "GLIGENTextBoxApply",
"pos": [770, -340],
"size": [437.22, 382.68],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "conditioning_to",
"type": "CONDITIONING",
"link": 78
},
{
"name": "clip",
"type": "CLIP",
"link": 74
},
{
"name": "gligen_textbox_model",
"type": "GLIGEN",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [77],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "GLIGENTextBoxApply"
},
"widgets_values": ["sun", 144, 144, 416, 16]
},
{
"id": 21,
"type": "GLIGENTextBoxApply",
"pos": [270, -340],
"size": [437.22, 382.68],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "conditioning_to",
"type": "CONDITIONING",
"link": 69
},
{
"name": "clip",
"type": "CLIP",
"link": 53
},
{
"name": "gligen_textbox_model",
"type": "GLIGEN",
"link": 54
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [65, 78],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "GLIGENTextBoxApply"
},
"widgets_values": ["purple galaxy bottle", 192, 304, 176, 272]
},
{
"id": 10,
"type": "GLIGENLoader",
"pos": [-230, -70],
"size": [390, 60],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "GLIGEN",
"type": "GLIGEN",
"links": [54, 75],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "GLIGENLoader"
},
"widgets_values": ["gligen_sd14_textbox_pruned.safetensors"]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-220, 130],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [1],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [5, 53, 67, 74],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [79],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 28,
"type": "MarkdownNote",
"pos": [-210, 285],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/gligen/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[1, 4, 0, 3, 0, "MODEL"],
[2, 5, 0, 3, 3, "LATENT"],
[5, 4, 1, 7, 0, "CLIP"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[53, 4, 1, 21, 1, "CLIP"],
[54, 10, 0, 21, 2, "GLIGEN"],
[57, 7, 0, 3, 2, "CONDITIONING"],
[67, 4, 1, 24, 0, "CLIP"],
[69, 24, 0, 21, 0, "CONDITIONING"],
[74, 4, 1, 27, 1, "CLIP"],
[75, 10, 0, 27, 2, "GLIGEN"],
[77, 27, 0, 3, 1, "CONDITIONING"],
[78, 21, 0, 27, 0, "CONDITIONING"],
[79, 4, 2, 8, 1, "VAE"]
],
"groups": [
{
"id": 1,
"title": "Base Prompt",
"bounding": [-315, -465, 518, 302],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "GLIGEN (for best results the elements should match some elements in the base prompt)",
"bounding": [255, -465, 980, 529],
"color": "#A88",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.8,
"offset": [433.59, 361.81]
}
},
"version": 0.4,
"models": [
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "gligen_sd14_textbox_pruned.safetensors",
"url": "https://huggingface.co/comfyanonymous/GLIGEN_pruned_safetensors/resolve/main/gligen_sd14_textbox_pruned.safetensors?download=true",
"directory": "gligen"
}
]
}
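
Widget values in these files are positional as well. For the KSampler nodes above, the order appears to be [seed, seed_control, steps, cfg, sampler_name, scheduler, denoise], inferred from values such as [1023216319780679, "randomize", 20, 8, "uni_pc_bh2", "normal", 1]. The labels in the sketch below are assumptions for readability, not an official schema:

```typescript
// Sketch: attach names to a KSampler node's positional widgets_values.
// Field order inferred from the workflows above; treat the labels as assumptions.
interface KSamplerSettings {
  seed: number
  seedControl: string // e.g. "randomize" or "fixed"
  steps: number
  cfg: number
  samplerName: string // e.g. "euler", "uni_pc_bh2"
  scheduler: string // e.g. "normal", "simple"
  denoise: number // 1 for a full pass, < 1 for second passes
}

function parseKSamplerWidgets(values: (string | number)[]): KSamplerSettings {
  const [seed, seedControl, steps, cfg, samplerName, scheduler, denoise] = values
  return {
    seed: Number(seed),
    seedControl: String(seedControl),
    steps: Number(steps),
    cfg: Number(cfg),
    samplerName: String(samplerName),
    scheduler: String(scheduler),
    denoise: Number(denoise)
  }
}

// Example from the GLIGEN workflow above:
// parseKSamplerWidgets([1023216319780679, 'randomize', 20, 8, 'uni_pc_bh2', 'normal', 1])
```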

Binary file not shown (Before: 78 KiB)

@@ -1,607 +0,0 @@
{
"last_node_id": 26,
"last_link_id": 35,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1185.5, 412.07],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 30
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 13,
"type": "VAEDecode",
"pos": [3221.22, 232.38],
"size": [210, 46],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 15
},
{
"name": "vae",
"type": "VAE",
"link": 33
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [17],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [81.78, 142.34],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 28
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4, 12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece HDR victorian portrait painting of woman, blonde hair, mountain nature, blue sky\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [84.78, 352.34],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 29
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6, 13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands, text, watermark\n"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [142.78, 571.34],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [552.78, 143.34],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 34
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 20],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
251225068430076,
"randomize",
12,
8,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 21,
"type": "VAEDecode",
"pos": [988.18, 29.56],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 20
},
{
"name": "vae",
"type": "VAE",
"link": 32
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 20,
"type": "VAEEncode",
"pos": [2459.1, 103.02],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 26
},
{
"name": "vae",
"type": "VAE",
"link": 31
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [18],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEEncode"
},
"widgets_values": []
},
{
"id": 22,
"type": "ImageUpscaleWithModel",
"pos": [1631.06, 3.66],
"size": [226.8, 46],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "upscale_model",
"type": "UPSCALE_MODEL",
"link": 24
},
{
"name": "image",
"type": "IMAGE",
"link": 23
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [27],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageUpscaleWithModel"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1446, 411],
"size": [611.26, 628.6],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 24,
"type": "ImageScale",
"pos": [1931, 10],
"size": [315, 130],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 27
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [26],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageScale"
},
"widgets_values": ["bilinear", 1536, 1536, "disabled"]
},
{
"id": 12,
"type": "SaveImage",
"pos": [3463, 230],
"size": [868.01, 936.97],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 17
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 11,
"type": "KSampler",
"pos": [2811.96, 176.22],
"size": [315, 262],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 35,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 12,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 13,
"slot_index": 2
},
{
"name": "latent_image",
"type": "LATENT",
"link": 18,
"slot_index": 3
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [15],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
783745448521451,
"randomize",
14,
8,
"uni_pc_bh2",
"normal",
0.5
]
},
{
"id": 25,
"type": "CheckpointLoaderSimple",
"pos": [-262, 284],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [34, 35],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [28, 29],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [30, 31, 32, 33],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v2-1_768-ema-pruned.safetensors"]
},
{
"id": 23,
"type": "UpscaleModelLoader",
"pos": [1288.06, -39.34],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "UPSCALE_MODEL",
"type": "UPSCALE_MODEL",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UpscaleModelLoader"
},
"widgets_values": ["RealESRGAN_x4plus.pth"]
},
{
"id": 26,
"type": "MarkdownNote",
"pos": [-300, 750],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/#non-latent-upscaling)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[12, 6, 0, 11, 1, "CONDITIONING"],
[13, 7, 0, 11, 2, "CONDITIONING"],
[15, 11, 0, 13, 0, "LATENT"],
[17, 13, 0, 12, 0, "IMAGE"],
[18, 20, 0, 11, 3, "LATENT"],
[20, 3, 0, 21, 0, "LATENT"],
[23, 21, 0, 22, 1, "IMAGE"],
[24, 23, 0, 22, 0, "UPSCALE_MODEL"],
[26, 24, 0, 20, 0, "IMAGE"],
[27, 22, 0, 24, 0, "IMAGE"],
[28, 25, 1, 6, 0, "CLIP"],
[29, 25, 1, 7, 0, "CLIP"],
[30, 25, 2, 8, 1, "VAE"],
[31, 25, 2, 20, 1, "VAE"],
[32, 25, 2, 21, 1, "VAE"],
[33, 25, 2, 13, 1, "VAE"],
[34, 25, 0, 3, 0, "MODEL"],
[35, 25, 0, 11, 0, "MODEL"]
],
"groups": [
{
"id": 1,
"title": "Txt2Img",
"bounding": [-300, 0, 1211, 708],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Save Intermediate Image",
"bounding": [1170, 330, 516, 196],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Second pass",
"bounding": [2775, 90, 379, 429],
"color": "#444",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Save Final Image",
"bounding": [3210, 135, 483, 199],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 5,
"title": "ESRGAN upscale with 4x model",
"bounding": [1260, -120, 578, 184],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 6,
"title": "Decode to Pixel space",
"bounding": [960, -45, 285, 142],
"color": "#A88",
"font_size": 24,
"flags": {}
},
{
"id": 7,
"title": "Encode back to latent space",
"bounding": [2400, 15, 312, 157],
"color": "#A88",
"font_size": 24,
"flags": {}
},
{
"id": 8,
"title": "Downscale image to a more reasonable size",
"bounding": [1845, -75, 483, 245],
"color": "#8AA",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.71,
"offset": [448.42, 482.51]
}
},
"version": 0.4,
"models": [
{
"name": "v2-1_768-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "RealESRGAN_x4plus.pth",
"url": "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.0/RealESRGAN_x4plus.pth",
"directory": "upscale_models"
}
]
}
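
The `groups` entries above carry a `bounding` array alongside a title and color; from the values it looks like [x, y, width, height] in canvas coordinates, though that ordering is an inference, not documented here. A sketch that tests whether a node position falls inside a group:

```typescript
// Sketch: check whether a node's pos lies inside a group's bounding box.
// Bounding assumed to be [x, y, width, height], inferred from the values above.
type Bounding = [number, number, number, number]

function nodeInGroup(pos: [number, number], bounding: Bounding): boolean {
  const [x, y, width, height] = bounding
  const [nodeX, nodeY] = pos
  return nodeX >= x && nodeX <= x + width && nodeY >= y && nodeY <= y + height
}

// Example from the "Txt2Img" group above:
// nodeInGroup([552.78, 143.34], [-300, 0, 1211, 708]) // true: the first KSampler sits inside it
```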

Binary file not shown (Before: 151 KiB)

@@ -1,442 +0,0 @@
{
"last_node_id": 17,
"last_link_id": 23,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1235.72, 577.19],
"size": [210, 46],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 21
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "LatentUpscale",
"pos": [1238, 170],
"size": [315, 130],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 10
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [14]
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1152, 1152, "disabled"]
},
{
"id": 13,
"type": "VAEDecode",
"pos": [1961, 125],
"size": [210, 46],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 15
},
{
"name": "vae",
"type": "VAE",
"link": 22
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [17],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [374, 171],
"size": [422.85, 164.31],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 19
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4, 12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece HDR victorian portrait painting of woman, blonde hair, mountain nature, blue sky\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [377, 381],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6, 13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands, text, watermark\n"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [435, 600],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 11,
"type": "KSampler",
"pos": [1585, 114],
"size": [315, 262],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 23,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 12,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 13,
"slot_index": 2
},
{
"name": "latent_image",
"type": "LATENT",
"link": 14,
"slot_index": 3
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [15],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
469771404043268,
"randomize",
14,
8,
"dpmpp_2m",
"simple",
0.5
]
},
{
"id": 12,
"type": "SaveImage",
"pos": [2203, 123],
"size": [407.54, 468.13],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 17
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [845, 172],
"size": [315, 262],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 18
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
89848141647836,
"randomize",
12,
8,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 16,
"type": "CheckpointLoaderSimple",
"pos": [24, 315],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [18, 23],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [19, 20],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [21, 22],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v2-1_768-ema-pruned.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1495.72, 576.19],
"size": [232.94, 282.43],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 17,
"type": "MarkdownNote",
"pos": [0, 780],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[10, 3, 0, 10, 0, "LATENT"],
[12, 6, 0, 11, 1, "CONDITIONING"],
[13, 7, 0, 11, 2, "CONDITIONING"],
[14, 10, 0, 11, 3, "LATENT"],
[15, 11, 0, 13, 0, "LATENT"],
[17, 13, 0, 12, 0, "IMAGE"],
[18, 16, 0, 3, 0, "MODEL"],
[19, 16, 1, 6, 0, "CLIP"],
[20, 16, 1, 7, 0, "CLIP"],
[21, 16, 2, 8, 1, "VAE"],
[22, 16, 2, 13, 1, "VAE"],
[23, 16, 0, 11, 0, "MODEL"]
],
"groups": [
{
"id": 1,
"title": "Txt2Img",
"bounding": [0, 30, 1211, 708],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Save Intermediate Image",
"bounding": [1230, 495, 516, 196],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Hires Fix",
"bounding": [1230, 30, 710, 464],
"color": "#b58b2a",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Save Final Image",
"bounding": [1950, 30, 483, 199],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.97,
"offset": [419.13, 209.33]
}
},
"version": 0.4,
"models": [
{
"name": "v2-1_768-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors?download=true",
"directory": "checkpoints"
}
]
}
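
Each workflow above also includes a MarkdownNote node whose widget text holds a "Learn more" link to the matching ComfyUI examples page. A short sketch, again illustrative and limited to the structure shown above, that pulls those URLs out of a parsed workflow:

```typescript
// Sketch: collect the documentation URLs embedded in MarkdownNote nodes.
interface NoteNode {
  type: string
  widgets_values?: string[]
}

function extractDocLinks(nodes: NoteNode[]): string[] {
  return nodes
    .filter((node) => node.type === 'MarkdownNote')
    .flatMap((node) => node.widgets_values ?? [])
    .flatMap((text) => text.match(/https?:\/\/[^\s)]+/g) ?? [])
}

// For the hires-fix workflow above this yields:
// ['https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/']
```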

Binary file not shown (Before: 5.1 MiB)

@@ -1,553 +0,0 @@
{
"last_node_id": 78,
"last_link_id": 215,
"nodes": [
{
"id": 16,
"type": "KSamplerSelect",
"pos": [484, 751],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"shape": 3,
"links": [19]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 17,
"type": "BasicScheduler",
"pos": [478, 860],
"size": [315, 106],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 190,
"slot_index": 0
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"shape": 3,
"links": [20]
}
],
"properties": {
"Node name for S&R": "BasicScheduler"
},
"widgets_values": ["simple", 20, 1]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [520, 100],
"size": [317.4, 58],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 175
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [129],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [6],
"color": "#233",
"bgcolor": "#355"
},
{
"id": 45,
"type": "EmptyHunyuanLatentVideo",
"pos": [475.54, 432.67],
"size": [315, 130],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [180],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyHunyuanLatentVideo"
},
"widgets_values": [848, 480, 73, 1]
},
{
"id": 22,
"type": "BasicGuider",
"pos": [600, 0],
"size": [222.35, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 195,
"slot_index": 0
},
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 129,
"slot_index": 1
}
],
"outputs": [
{
"name": "GUIDER",
"type": "GUIDER",
"shape": 3,
"links": [30],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "BasicGuider"
},
"widgets_values": []
},
{
"id": 67,
"type": "ModelSamplingSD3",
"pos": [360, 0],
"size": [210, 58],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 209
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [195],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ModelSamplingSD3"
},
"widgets_values": [7]
},
{
"id": 10,
"type": "VAELoader",
"pos": [0, 420],
"size": [350, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [206, 211],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["hunyuan_video_vae_bf16.safetensors"]
},
{
"id": 11,
"type": "DualCLIPLoader",
"pos": [0, 270],
"size": [350, 106],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [205],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"llava_llama3_fp8_scaled.safetensors",
"hunyuan_video",
"default"
]
},
{
"id": 73,
"type": "VAEDecodeTiled",
"pos": [1150, 200],
"size": [210, 150],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 210
},
{
"name": "vae",
"type": "VAE",
"link": 211
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [215],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecodeTiled"
},
"widgets_values": [256, 64, 64, 8]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1150, 90],
"size": [210, 46],
"flags": {},
"order": 15,
"mode": 2,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 181
},
{
"name": "vae",
"type": "VAE",
"link": 206
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 74,
"type": "Note",
"pos": [1150, 360],
"size": [210, 170],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"Use the tiled decode node by default because most people will need it.\n\nLower the tile_size and overlap if you run out of memory."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 12,
"type": "UNETLoader",
"pos": [0, 150],
"size": [350, 82],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [190, 209],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["hunyuan_video_t2v_720p_bf16.safetensors", "default"],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 77,
"type": "Note",
"pos": [0, 0],
"size": [350, 110],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"Select a fp8 weight_dtype if you are running out of memory."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 13,
"type": "SamplerCustomAdvanced",
"pos": [860, 200],
"size": [272.36, 124.54],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "noise",
"type": "NOISE",
"link": 37,
"slot_index": 0
},
{
"name": "guider",
"type": "GUIDER",
"link": 30,
"slot_index": 1
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 19,
"slot_index": 2
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 20,
"slot_index": 3
},
{
"name": "latent_image",
"type": "LATENT",
"link": 180,
"slot_index": 4
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"shape": 3,
"links": [181, 210],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustomAdvanced"
},
"widgets_values": []
},
{
"id": 44,
"type": "CLIPTextEncode",
"pos": [420, 200],
"size": [422.85, 164.31],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 205
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [175],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"anime style anime girl with massive fennec ears and one big fluffy tail, she has blonde hair long hair blue eyes wearing a pink sweater and a long blue skirt walking in a beautiful outdoor scenery with snow mountains in the background"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 75,
"type": "SaveAnimatedWEBP",
"pos": [1410, 200],
"size": [315, 366],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 215
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI", 24, false, 80, "default"]
},
{
"id": 25,
"type": "RandomNoise",
"pos": [479, 618],
"size": [315, 82],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "NOISE",
"type": "NOISE",
"shape": 3,
"links": [37]
}
],
"properties": {
"Node name for S&R": "RandomNoise"
},
"widgets_values": [1, "randomize"],
"color": "#2a363b",
"bgcolor": "#3f5159"
},
{
"id": 78,
"type": "MarkdownNote",
"pos": [0, 525],
"size": [225, 60],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/hunyuan_video/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[19, 16, 0, 13, 2, "SAMPLER"],
[20, 17, 0, 13, 3, "SIGMAS"],
[30, 22, 0, 13, 1, "GUIDER"],
[37, 25, 0, 13, 0, "NOISE"],
[129, 26, 0, 22, 1, "CONDITIONING"],
[175, 44, 0, 26, 0, "CONDITIONING"],
[180, 45, 0, 13, 4, "LATENT"],
[181, 13, 0, 8, 0, "LATENT"],
[190, 12, 0, 17, 0, "MODEL"],
[195, 67, 0, 22, 0, "MODEL"],
[205, 11, 0, 44, 0, "CLIP"],
[206, 10, 0, 8, 1, "VAE"],
[209, 12, 0, 67, 0, "MODEL"],
[210, 13, 0, 73, 0, "LATENT"],
[211, 10, 0, 73, 1, "VAE"],
[215, 73, 0, 75, 0, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"groupNodes": {},
"ds": {
"scale": 0.86,
"offset": [315.94, 195.23]
}
},
"version": 0.4,
"models": [
{
"name": "hunyuan_video_vae_bf16.safetensors",
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/vae/hunyuan_video_vae_bf16.safetensors?download=true",
"directory": "vae"
},
{
"name": "llava_llama3_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/text_encoders/llava_llama3_fp8_scaled.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "hunyuan_video_t2v_720p_bf16.safetensors",
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/diffusion_models/hunyuan_video_t2v_720p_bf16.safetensors?download=true",
"directory": "diffusion_models"
}
]
}

Binary file not shown. (before: 43 KiB)

View File

@@ -1,447 +0,0 @@
{
"last_node_id": 14,
"last_link_id": 17,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [
413,
389
],
"size": {
"0": 425.27801513671875,
"1": 180.6060791015625
},
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 15
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
6
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"watermark, text\n"
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [
415,
186
],
"size": {
"0": 422.84503173828125,
"1": 164.31304931640625
},
"flags": {},
"order": 2,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 14
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
4
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"photograph of victorian woman with wings, sky clouds, meadow grass\n"
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [
1209,
188
],
"size": {
"0": 210,
"1": 46
},
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 17
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
9
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
}
},
{
"id": 9,
"type": "SaveImage",
"pos": [
1451,
189
],
"size": {
"0": 210,
"1": 58
},
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"properties": {},
"widgets_values": [
"ComfyUI"
]
},
{
"id": 10,
"type": "LoadImage",
"pos": [
215.9799597167969,
703.6800268554688
],
"size": [
315,
314.00002670288086
],
"flags": {},
"order": 0,
"mode": 0,
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
10
],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null,
"shape": 3
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": [
"example.png",
"image"
]
},
{
"id": 12,
"type": "VAEEncode",
"pos": [
614.979959716797,
707.6800268554688
],
"size": {
"0": 210,
"1": 46
},
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 10
},
{
"name": "vae",
"type": "VAE",
"link": 16
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
11
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEEncode"
}
},
{
"id": 3,
"type": "KSampler",
"pos": [
863,
186
],
"size": {
"0": 315,
"1": 262
},
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 13
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 11
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
7
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
280823642470253,
"randomize",
20,
8,
"dpmpp_2m",
"normal",
0.8700000000000001
]
},
{
"id": 14,
"type": "CheckpointLoaderSimple",
"pos": [
19,
433
],
"size": {
"0": 315,
"1": 98
},
"flags": {},
"order": 1,
"mode": 0,
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
13
],
"shape": 3,
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [
14,
15
],
"shape": 3,
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [
16,
17
],
"shape": 3,
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": [
"v1-5-pruned-emaonly-fp16.safetensors"
]
}
],
"links": [
[
4,
6,
0,
3,
1,
"CONDITIONING"
],
[
6,
7,
0,
3,
2,
"CONDITIONING"
],
[
7,
3,
0,
8,
0,
"LATENT"
],
[
9,
8,
0,
9,
0,
"IMAGE"
],
[
10,
10,
0,
12,
0,
"IMAGE"
],
[
11,
12,
0,
3,
3,
"LATENT"
],
[
13,
14,
0,
3,
0,
"MODEL"
],
[
14,
14,
1,
6,
0,
"CLIP"
],
[
15,
14,
1,
7,
0,
"CLIP"
],
[
16,
14,
2,
12,
1,
"VAE"
],
[
17,
14,
2,
8,
1,
"VAE"
]
],
"groups": [
{
"title": "Loading images",
"bounding": [
150,
630,
726,
171
],
"color": "#3f789e"
}
],
"config": {},
"extra": {},
"version": 0.4,
"models": [{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
}]
}

Binary file not shown. (before: 2.5 MiB)

View File

@@ -1,314 +0,0 @@
{
"last_node_id": 24,
"last_link_id": 41,
"nodes": [
{
"id": 3,
"type": "KSampler",
"pos": [867.8, 375.7],
"size": [315, 262],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 39
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 17
},
{
"name": "latent_image",
"type": "LATENT",
"link": 18
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
237514639057514,
"randomize",
20,
2.5,
"euler",
"karras",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1207.8, 375.7],
"size": [210, 46],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 26
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "SaveAnimatedWEBP",
"pos": [1459, 376],
"size": [741.67, 564.59],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 10
}
],
"outputs": [],
"properties": {
"Node name for S&R": "SaveAnimatedWEBP"
},
"widgets_values": ["ComfyUI", 10, false, 85, "default"]
},
{
"id": 12,
"type": "SVD_img2vid_Conditioning",
"pos": [487.8, 395.7],
"size": [315, 218],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 24
},
{
"name": "init_image",
"type": "IMAGE",
"link": 41,
"slot_index": 1
},
{
"name": "vae",
"type": "VAE",
"link": 25
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"shape": 3,
"links": [40],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"shape": 3,
"links": [17],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"shape": 3,
"links": [18],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "SVD_img2vid_Conditioning"
},
"widgets_values": [1024, 576, 14, 127, 6, 0]
},
{
"id": 14,
"type": "VideoLinearCFGGuidance",
"pos": [487.8, 265.7],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 23
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [39],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VideoLinearCFGGuidance"
},
"widgets_values": [1]
},
{
"id": 15,
"type": "ImageOnlyCheckpointLoader",
"pos": [55, 267],
"size": [369.6, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [23],
"slot_index": 0
},
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [24],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [25, 26],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "ImageOnlyCheckpointLoader"
},
"widgets_values": ["svd.safetensors"]
},
{
"id": 23,
"type": "LoadImage",
"pos": [106, 441],
"size": [315, 314.0],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [41]
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["mountains.png", "image"]
},
{
"id": 24,
"type": "MarkdownNote",
"pos": [105, 810],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/video/#image-to-video)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[10, 8, 0, 10, 0, "IMAGE"],
[17, 12, 1, 3, 2, "CONDITIONING"],
[18, 12, 2, 3, 3, "LATENT"],
[23, 15, 0, 14, 0, "MODEL"],
[24, 15, 1, 12, 0, "CLIP_VISION"],
[25, 15, 2, 12, 2, "VAE"],
[26, 15, 2, 8, 1, "VAE"],
[39, 14, 0, 3, 0, "MODEL"],
[40, 12, 0, 3, 1, "CONDITIONING"],
[41, 23, 0, 12, 1, "IMAGE"]
],
"groups": [
{
"id": 1,
"title": "Image to Video",
"bounding": [480, 195, 954, 478],
"color": "#8A8",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.96,
"offset": [255.53, 68.37]
}
},
"version": 0.4,
"models": [
{
"name": "svd.safetensors",
"url": "https://huggingface.co/stabilityai/stable-video-diffusion-img2vid/resolve/main/svd.safetensors?download=true",
"directory": "checkpoints"
}
]
}

Binary file not shown. (before: 114 KiB)

Binary file not shown. (before: 27 KiB)

View File

@@ -1,360 +0,0 @@
{
"last_node_id": 31,
"last_link_id": 87,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [432, 158],
"size": [422.85, 164.31],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 81
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"outdoors in the yosemite national park mountains nature\n\n\n\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [434, 371],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 82
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["watermark, text\n"]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1422, 387],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 42
},
{
"name": "vae",
"type": "VAE",
"link": 83
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [22],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [940, 180],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 80
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 72
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
152545289528694,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 29,
"type": "CheckpointLoaderSimple",
"pos": [17, 303],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [80],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [81, 82],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [83, 84],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["512-inpainting-ema.safetensors"]
},
{
"id": 20,
"type": "LoadImage",
"pos": [-107, 726],
"size": [344, 346],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [85],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": [],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["yosemite_outpaint_example.png", "image"]
},
{
"id": 30,
"type": "ImagePadForOutpaint",
"pos": [269, 727],
"size": [315, 174],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 85
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [87],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": [86],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "ImagePadForOutpaint"
},
"widgets_values": [0, 128, 0, 128, 40]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1671, 384],
"size": [360.55, 441.53],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 22
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 26,
"type": "VAEEncodeForInpaint",
"pos": [617, 720],
"size": [226.8, 98],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 87
},
{
"name": "vae",
"type": "VAE",
"link": 84
},
{
"name": "mask",
"type": "MASK",
"link": 86
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [72],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEEncodeForInpaint"
},
"widgets_values": [8]
},
{
"id": 31,
"type": "MarkdownNote",
"pos": [30, 465],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/inpaint/#outpainting)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[22, 8, 0, 9, 0, "IMAGE"],
[42, 3, 0, 8, 0, "LATENT"],
[72, 26, 0, 3, 3, "LATENT"],
[80, 29, 0, 3, 0, "MODEL"],
[81, 29, 1, 6, 0, "CLIP"],
[82, 29, 1, 7, 0, "CLIP"],
[83, 29, 2, 8, 1, "VAE"],
[84, 29, 2, 26, 1, "VAE"],
[85, 20, 0, 30, 0, "IMAGE"],
[86, 30, 1, 26, 2, "MASK"],
[87, 30, 0, 26, 0, "IMAGE"]
],
"groups": [
{
"id": 1,
"title": "Load image and pad for outpainting",
"bounding": [-120, 600, 1038, 509],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.86,
"offset": [491.92, 146.6]
}
},
"version": 0.4,
"models": [
{
"name": "512-inpainting-ema.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-inpainting/resolve/main/512-inpainting-ema.safetensors?download=true",
"directory": "checkpoints"
}
]
}

Binary file not shown. (before: 128 KiB)

Binary file not shown. (before: 36 KiB)

View File

@@ -1,323 +0,0 @@
{
"last_node_id": 30,
"last_link_id": 84,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [432, 158],
"size": [422.85, 164.31],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 81
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"closeup photograph of maine coon (cat:1.2) in the yosemite national park mountains nature"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [434, 371],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 82
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["watermark, text\n"]
},
{
"id": 26,
"type": "VAEEncodeForInpaint",
"pos": [503, 669],
"size": [226.8, 98],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 73
},
{
"name": "vae",
"type": "VAE",
"link": 83
},
{
"name": "mask",
"type": "MASK",
"link": 79
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [72],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEEncodeForInpaint"
},
"widgets_values": [6]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1422, 387],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 42
},
{
"name": "vae",
"type": "VAE",
"link": 84
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [22],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1709, 356],
"size": [210, 250],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 22
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 29,
"type": "CheckpointLoaderSimple",
"pos": [30, 314],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [80],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [81, 82],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [83, 84],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["512-inpainting-ema.safetensors"]
},
{
"id": 20,
"type": "LoadImage",
"pos": [49, 679],
"size": [385, 365],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [73],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": [79],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["yosemite_inpaint_example.png", "image"]
},
{
"id": 3,
"type": "KSampler",
"pos": [940, 180],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 80
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 72
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1040111309094545,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 30,
"type": "MarkdownNote",
"pos": [30, 480],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/inpaint/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[22, 8, 0, 9, 0, "IMAGE"],
[42, 3, 0, 8, 0, "LATENT"],
[72, 26, 0, 3, 3, "LATENT"],
[73, 20, 0, 26, 0, "IMAGE"],
[79, 20, 1, 26, 2, "MASK"],
[80, 29, 0, 3, 0, "MODEL"],
[81, 29, 1, 6, 0, "CLIP"],
[82, 29, 1, 7, 0, "CLIP"],
[83, 29, 2, 26, 1, "VAE"],
[84, 29, 2, 8, 1, "VAE"]
],
"groups": [
{
"id": 1,
"title": "Load image and alpha mask for inpainting",
"bounding": [-15, 600, 786, 442],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.88,
"offset": [832.78, 166.61]
}
},
"version": 0.4,
"models": [
{
"name": "512-inpainting-ema.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-inpainting/resolve/main/512-inpainting-ema.safetensors?download=true",
"directory": "checkpoints"
}
]
}

Binary file not shown. (before: 301 KiB)

View File

@@ -1,528 +0,0 @@
{
"last_node_id": 33,
"last_link_id": 62,
"nodes": [
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-60, 229],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [54],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8, 31],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["wd-illusion-fp16.safetensors"]
},
{
"id": 13,
"type": "CheckpointLoaderSimple",
"pos": [1296, -571],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [56],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [27],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["cardosAnime_v10.safetensors"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [370, 40],
"size": [510, 220],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"anime happy girl (fennec:1.2) (ears:1.3) blonde long (messy hair:1.1) blue eyes, wearing serafuku jeans (sitting on rock:1.15) (spread legs:1.15) (sneakers:0.95) in lake rural swiss village on the mountain side sky clouds HDR sunset\n(exceptional, best aesthetic, new, newest, best quality, masterpiece, extremely detailed, anime:1.05)\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [370, 300],
"size": [510, 190],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"lowres, bad anatomy, bad hands, (text:1.1), blurry, mutated hands and fingers, mutation, deformed face, ugly, (logo:1.1), cropped, worst quality, jpeg, (jpeg artifacts), deleted, old, oldest, (censored), (bad aesthetic), (mosaic censoring, bar censor, blur censor) earphones"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [560, 540],
"size": [315, 106],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1368, 768, 1]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1280, 140],
"size": [210, 46],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "SaveImage",
"pos": [1540, 140],
"size": [1174.13, 734.16],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 10
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 22,
"type": "CLIPSetLastLayer",
"pos": [1670, -550],
"size": [315, 58],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 27
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [13, 14],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 15,
"type": "CLIPTextEncode",
"pos": [2060, -920],
"size": [662.38, 313.1],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 14
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"from far away anime happy girl (fennec ears:0.95) long (messy hair:1.3) blue eyes, wearing serafuku jeans sitting on rock spread legs (sneakers:0.95) in lake rural swiss village on the mountain side sky clouds HDR sunset\n"
]
},
{
"id": 14,
"type": "CLIPTextEncode",
"pos": [2060, -550],
"size": [660, 300],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 13
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [58],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), (text:1.1), letters, numbers, error, cropped, (jpeg artifacts:1.2), (signature:1.1), (watermark:1.1), username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.1), extra legs, (forehead mark) (penis)"
]
},
{
"id": 11,
"type": "VAEDecode",
"pos": [3240, -750],
"size": [210, 46],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 60
},
{
"name": "vae",
"type": "VAE",
"link": 31
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 12,
"type": "SaveImage",
"pos": [3540, -750],
"size": [1868.09, 1101.47],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 12
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 32,
"type": "KSampler",
"pos": [2830, -750],
"size": [315, 262],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 56
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 57
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 58
},
{
"name": "latent_image",
"type": "LATENT",
"link": 59
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [60],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
417682270866800,
"randomize",
8,
13,
"dpmpp_sde",
"simple",
0.5
]
},
{
"id": 27,
"type": "LatentUpscaleBy",
"pos": [1510, -160],
"size": [325.41, 82],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 62,
"slot_index": 0
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [59],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscaleBy"
},
"widgets_values": ["bislerp", 1.5]
},
{
"id": 3,
"type": "KSampler",
"pos": [920, 140],
"size": [318.5, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 54
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 62],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
758448896326830,
"randomize",
14,
8,
"dpmpp_sde",
"simple",
1
]
},
{
"id": 33,
"type": "MarkdownNote",
"pos": [-45, 375],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/#more-examples)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[3, 4, 1, 6, 0, "CLIP"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[5, 4, 1, 7, 0, "CLIP"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[10, 8, 0, 10, 0, "IMAGE"],
[12, 11, 0, 12, 0, "IMAGE"],
[13, 22, 0, 14, 0, "CLIP"],
[14, 22, 0, 15, 0, "CLIP"],
[27, 13, 1, 22, 0, "CLIP"],
[31, 4, 2, 11, 1, "VAE"],
[54, 4, 0, 3, 0, "MODEL"],
[56, 13, 0, 32, 0, "MODEL"],
[57, 15, 0, 32, 1, "CONDITIONING"],
[58, 14, 0, 32, 2, "CONDITIONING"],
[59, 27, 0, 32, 3, "LATENT"],
[60, 32, 0, 11, 0, "LATENT"],
[62, 3, 0, 27, 0, "LATENT"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.76,
"offset": [1200.17, 444.58]
}
},
"version": 0.4
}

Binary file not shown. (before: 134 KiB)

View File

@@ -1,311 +0,0 @@
{
"last_node_id": 11,
"last_link_id": 14,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 14
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 13
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["masterpiece best quality girl"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 12
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
851616030078638,
"randomize",
20,
8,
"euler",
"normal",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [210, 250],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-461, 288],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [10],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [11],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 10,
"type": "LoraLoader",
"pos": [-25, 144],
"size": [315, 126],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 10
},
{
"name": "clip",
"type": "CLIP",
"link": 11
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [12],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [13, 14],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoraLoader"
},
"widgets_values": ["epiNoiseoffset_v2.safetensors", 1, 1]
},
{
"id": 11,
"type": "MarkdownNote",
"pos": [-450, 435],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/lora/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[9, 8, 0, 9, 0, "IMAGE"],
[10, 4, 0, 10, 0, "MODEL"],
[11, 4, 1, 10, 1, "CLIP"],
[12, 10, 0, 3, 0, "MODEL"],
[13, 10, 1, 6, 0, "CLIP"],
[14, 10, 1, 7, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.06,
"offset": [777.19, 192.48]
}
},
"version": 0.4,
"models": [
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "epiNoiseoffset_v2.safetensors",
"url": "https://civitai.com/api/download/models/16576?type=Model&format=SafeTensor&size=full&fp=fp16",
"directory": "loras"
}
]
}

Binary file not shown. (before: 70 KiB)

View File

@@ -1,357 +0,0 @@
{
"last_node_id": 12,
"last_link_id": 18,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 14
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 13
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["masterpiece best quality girl"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 12
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
513173432917412,
"randomize",
20,
8,
"euler",
"normal",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [210, 250],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 10,
"type": "LoraLoader",
"pos": [-27, 160],
"size": [315, 126],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 15
},
{
"name": "clip",
"type": "CLIP",
"link": 16
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [12],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [13, 14],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoraLoader"
},
"widgets_values": ["epiNoiseoffset_v2.safetensors", 1, 1]
},
{
"id": 11,
"type": "LoraLoader",
"pos": [-379, 160],
"size": [315, 126],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 17
},
{
"name": "clip",
"type": "CLIP",
"link": 18
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [15],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [16],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoraLoader"
},
"widgets_values": ["theovercomer8sContrastFix_sd15.safetensors", 1, 1]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-780, 284],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [17],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [18],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 12,
"type": "MarkdownNote",
"pos": [-765, 450],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/lora/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[9, 8, 0, 9, 0, "IMAGE"],
[12, 10, 0, 3, 0, "MODEL"],
[13, 10, 1, 6, 0, "CLIP"],
[14, 10, 1, 7, 0, "CLIP"],
[15, 11, 0, 10, 0, "MODEL"],
[16, 11, 1, 10, 1, "CLIP"],
[17, 4, 0, 11, 0, "MODEL"],
[18, 4, 1, 11, 1, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.76,
"offset": [1200.17, 444.58]
}
},
"version": 0.4,
"models": [
{
"name": "theovercomer8sContrastFix_sd15.safetensors",
"url": "https://civitai.com/api/download/models/10350?type=Model&format=SafeTensor&size=full&fp=fp16",
"directory": "loras"
},
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "epiNoiseoffset_v2.safetensors",
"url": "https://civitai.com/api/download/models/16576?type=Model&format=SafeTensor&size=full&fp=fp16",
"directory": "loras"
}
]
}

Binary file not shown. (before: 9.4 MiB)

View File

@@ -1,482 +0,0 @@
{
"last_node_id": 79,
"last_link_id": 190,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [420, 190],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 74
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [187],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"best quality, 4k, HDR, a tracking shot of a beautiful scene of the sea waves on the beach with a massive explosion in the water"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [420, 390],
"size": [425.28, 180.61],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [188],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"low quality, worst quality, deformed, distorted, disfigured, motion smear, motion artifacts, fused fingers, bad anatomy, weird hand, ugly"
],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1600, 30],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 171
},
{
"name": "vae",
"type": "VAE",
"link": 87
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [106],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 38,
"type": "CLIPLoader",
"pos": [60, 190],
"size": [315, 82],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [74, 75],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPLoader"
},
"widgets_values": ["t5xxl_fp16.safetensors", "ltxv", "default"]
},
{
"id": 41,
"type": "SaveAnimatedWEBP",
"pos": [1830, 30],
"size": [680, 610],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 106
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI", 24, false, 90, "default"]
},
{
"id": 44,
"type": "CheckpointLoaderSimple",
"pos": [520, 30],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [181],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": null
},
{
"name": "VAE",
"type": "VAE",
"links": [87, 189],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["ltx-video-2b-v0.9.safetensors"]
},
{
"id": 69,
"type": "LTXVConditioning",
"pos": [920, 60],
"size": [223.87, 78],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 183
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 184
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [166],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [167],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LTXVConditioning"
},
"widgets_values": [25]
},
{
"id": 71,
"type": "LTXVScheduler",
"pos": [856, 531],
"size": [315, 154],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "latent",
"type": "LATENT",
"shape": 7,
"link": 185
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"links": [182],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LTXVScheduler"
},
"widgets_values": [30, 2.05, 0.95, true, 0.1]
},
{
"id": 72,
"type": "SamplerCustom",
"pos": [1201, 32],
"size": [355.2, 230],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 181
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 166
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 167
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 172
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 182
},
{
"name": "latent_image",
"type": "LATENT",
"link": 186
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"links": [171],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustom"
},
"widgets_values": [true, 501744655390087, "randomize", 3]
},
{
"id": 73,
"type": "KSamplerSelect",
"pos": [860, 420],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"links": [172]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 76,
"type": "Note",
"pos": [40, 350],
"size": [360, 200],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"This model needs long descriptive prompts, if the prompt is too short the quality will suffer greatly."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 77,
"type": "LTXVImgToVideo",
"pos": [863, 181],
"size": [315, 214],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 187
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 188
},
{
"name": "vae",
"type": "VAE",
"link": 189
},
{
"name": "image",
"type": "IMAGE",
"link": 190
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [183],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [184],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [185, 186],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "LTXVImgToVideo"
},
"widgets_values": [768, 512, 97, 1, 0.15]
},
{
"id": 78,
"type": "LoadImage",
"pos": [420, 620],
"size": [385.16, 333.33],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [190]
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["island.jpg", "image"]
},
{
"id": 79,
"type": "MarkdownNote",
"pos": [45, 600],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/ltxv/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[74, 38, 0, 6, 0, "CLIP"],
[75, 38, 0, 7, 0, "CLIP"],
[87, 44, 2, 8, 1, "VAE"],
[106, 8, 0, 41, 0, "IMAGE"],
[166, 69, 0, 72, 1, "CONDITIONING"],
[167, 69, 1, 72, 2, "CONDITIONING"],
[171, 72, 0, 8, 0, "LATENT"],
[172, 73, 0, 72, 3, "SAMPLER"],
[181, 44, 0, 72, 0, "MODEL"],
[182, 71, 0, 72, 4, "SIGMAS"],
[183, 77, 0, 69, 0, "CONDITIONING"],
[184, 77, 1, 69, 1, "CONDITIONING"],
[185, 77, 2, 71, 0, "LATENT"],
[186, 77, 2, 72, 5, "LATENT"],
[187, 6, 0, 77, 0, "CONDITIONING"],
[188, 7, 0, 77, 1, "CONDITIONING"],
[189, 44, 2, 77, 2, "VAE"],
[190, 78, 0, 77, 3, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.23,
"offset": [-35.52, 153.62]
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "ltx-video-2b-v0.9.safetensors",
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.safetensors?download=true",
"directory": "checkpoints"
}
]
}

Binary file not shown. (before: 2.7 MiB)

View File

@@ -1,419 +0,0 @@
{
"last_node_id": 77,
"last_link_id": 182,
"nodes": [
{
"id": 38,
"type": "CLIPLoader",
"pos": [60, 190],
"size": [315, 82],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [74, 75],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPLoader"
},
"widgets_values": ["t5xxl_fp16.safetensors", "ltxv", "default"]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1600, 30],
"size": [210, 46],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 171
},
{
"name": "vae",
"type": "VAE",
"link": 87
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [106],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 69,
"type": "LTXVConditioning",
"pos": [920, 60],
"size": [223.87, 78],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 169
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 170
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [166],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [167],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LTXVConditioning"
},
"widgets_values": [25]
},
{
"id": 72,
"type": "SamplerCustom",
"pos": [1201, 32],
"size": [355.2, 230],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 181
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 166
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 167
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 172
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 182
},
{
"name": "latent_image",
"type": "LATENT",
"link": 175
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"links": [171],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustom"
},
"widgets_values": [true, 497797676867141, "randomize", 3]
},
{
"id": 44,
"type": "CheckpointLoaderSimple",
"pos": [520, 30],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [181],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": null
},
{
"name": "VAE",
"type": "VAE",
"links": [87],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["ltx-video-2b-v0.9.safetensors"]
},
{
"id": 70,
"type": "EmptyLTXVLatentVideo",
"pos": [860, 240],
"size": [315, 130],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [168, 175],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLTXVLatentVideo"
},
"widgets_values": [768, 512, 97, 1]
},
{
"id": 71,
"type": "LTXVScheduler",
"pos": [856, 531],
"size": [315, 154],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "latent",
"type": "LATENT",
"shape": 7,
"link": 168
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"links": [182],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LTXVScheduler"
},
"widgets_values": [30, 2.05, 0.95, true, 0.1]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [420, 190],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 74
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [169],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"A woman with long brown hair and light skin smiles at another woman with long blonde hair. The woman with brown hair wears a black jacket and has a small, barely noticeable mole on her right cheek. The camera angle is a close-up, focused on the woman with brown hair's face. The lighting is warm and natural, likely from the setting sun, casting a soft glow on the scene. The scene appears to be real-life footage."
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [420, 390],
"size": [425.28, 180.61],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [170],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"low quality, worst quality, deformed, distorted, disfigured, motion smear, motion artifacts, fused fingers, bad anatomy, weird hand, ugly"
],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 73,
"type": "KSamplerSelect",
"pos": [860, 420],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"links": [172]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 76,
"type": "Note",
"pos": [40, 350],
"size": [360, 200],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"This model needs long descriptive prompts, if the prompt is too short the quality will suffer greatly."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 41,
"type": "SaveAnimatedWEBP",
"pos": [1830, 30],
"size": [680, 610],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 106
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI", 24, false, 90, "default"]
},
{
"id": 77,
"type": "MarkdownNote",
"pos": [45, 600],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/ltxv/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[74, 38, 0, 6, 0, "CLIP"],
[75, 38, 0, 7, 0, "CLIP"],
[87, 44, 2, 8, 1, "VAE"],
[106, 8, 0, 41, 0, "IMAGE"],
[166, 69, 0, 72, 1, "CONDITIONING"],
[167, 69, 1, 72, 2, "CONDITIONING"],
[168, 70, 0, 71, 0, "LATENT"],
[169, 6, 0, 69, 0, "CONDITIONING"],
[170, 7, 0, 69, 1, "CONDITIONING"],
[171, 72, 0, 8, 0, "LATENT"],
[172, 73, 0, 72, 3, "SAMPLER"],
[175, 70, 0, 72, 5, "LATENT"],
[181, 44, 0, 72, 0, "MODEL"],
[182, 71, 0, 72, 4, "SIGMAS"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.65,
"offset": [1490.32, 926.49]
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "ltx-video-2b-v0.9.safetensors",
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.safetensors?download=true",
"directory": "checkpoints"
}
]
}

Binary file not shown.

Binary file not shown.

View File

@@ -1,492 +0,0 @@
{
"last_node_id": 32,
"last_link_id": 43,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [180, 203],
"size": [425.28, 180.61],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 42
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis)"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [287, 462],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1053, 172],
"size": [210, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 28
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 24,
"type": "CLIPTextEncode",
"pos": [-823, -550],
"size": [422.85, 164.31],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 43
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [37],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo) girl photograph realistic (flat chest:0.9), (fennec ears:1.0) (fox ears:1.0), (messy hair) blonde hair, blue eyes, standing, serafuku sweater, (brick house) (scenery HDR landscape) (sun clouds) sky, mountains,\n\n"
]
},
{
"id": 21,
"type": "LoadImage",
"pos": [-560, -144],
"size": [272.84, 372.22],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [33],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["pose_present.png", "image"]
},
{
"id": 31,
"type": "CheckpointLoaderSimple",
"pos": [-1005, 281],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [41],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [42, 43],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["AOM3A1.safetensors"]
},
{
"id": 15,
"type": "VAELoader",
"pos": [720, 506],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [28],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["kl-f8-anime2.ckpt"]
},
{
"id": 27,
"type": "ControlNetLoader",
"pos": [-641, -245],
"size": [352.55, 58],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [39],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["control_v11p_sd15_openpose_fp16.safetensors"]
},
{
"id": 26,
"type": "ControlNetLoader",
"pos": [156, -339],
"size": [343.32, 58],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [38],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["control_v11p_sd15_scribble_fp16.safetensors"]
},
{
"id": 22,
"type": "ControlNetApply",
"pos": [-204, -240],
"size": [317.4, 98],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 37
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 39
},
{
"name": "image",
"type": "IMAGE",
"link": 33
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [35],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [1]
},
{
"id": 3,
"type": "KSampler",
"pos": [699, 167],
"size": [315, 262],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 41
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
894480165483805,
"randomize",
12,
6,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 23,
"type": "ControlNetApply",
"pos": [550.81, -385.59],
"size": [317.4, 98],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 35
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 38
},
{
"name": "image",
"type": "IMAGE",
"link": 34
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [0.8]
},
{
"id": 20,
"type": "LoadImage",
"pos": [188, -217],
"size": [278.1, 361.87],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [34],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["house_scribble.png", "image"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1310, 169],
"size": [516.95, 567.67],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 32,
"type": "MarkdownNote",
"pos": [-1005, 435],
"size": [225, 60],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#mixing-controlnets)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[28, 15, 0, 8, 1, "VAE"],
[33, 21, 0, 22, 2, "IMAGE"],
[34, 20, 0, 23, 2, "IMAGE"],
[35, 22, 0, 23, 0, "CONDITIONING"],
[37, 24, 0, 22, 0, "CONDITIONING"],
[38, 26, 0, 23, 1, "CONTROL_NET"],
[39, 27, 0, 22, 1, "CONTROL_NET"],
[40, 23, 0, 3, 1, "CONDITIONING"],
[41, 31, 0, 3, 0, "MODEL"],
[42, 31, 1, 7, 0, "CLIP"],
[43, 31, 1, 24, 0, "CLIP"]
],
"groups": [
{
"id": 1,
"title": "Apply Pose ControlNet",
"bounding": [-735, -360, 859, 323],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Apply Scribble ControlNet",
"bounding": [165, -480, 739, 336],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.81,
"offset": [2040.05, 734.44]
}
},
"version": 0.4,
"models": [
{
"name": "control_v11p_sd15_scribble_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_scribble_fp16.safetensors?download=true",
"directory": "controlnet"
},
{
"name": "control_v11p_sd15_openpose_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_openpose_fp16.safetensors",
"directory": "controlnet"
}
]
}

Binary file not shown.

View File

@@ -1,308 +0,0 @@
{
"last_node_id": 40,
"last_link_id": 79,
"nodes": [
{
"id": 3,
"type": "KSampler",
"pos": [863, 187],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 79
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 46
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 52
},
{
"name": "latent_image",
"type": "LATENT",
"link": 38
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [35],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
704883238463297,
"randomize",
30,
4.5,
"euler",
"simple",
1
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 74
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [46],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"a fox moving quickly in a beautiful winter scenery nature trees sunset tracking camera"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [52],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1210, 190],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 35
},
{
"name": "vae",
"type": "VAE",
"link": 76
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [56],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 21,
"type": "EmptyMochiLatentVideo",
"pos": [520, 620],
"size": [315, 130],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [38],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyMochiLatentVideo"
},
"widgets_values": [848, 480, 37, 1]
},
{
"id": 28,
"type": "SaveAnimatedWEBP",
"pos": [1460, 190],
"size": [847.3, 602.03],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 56
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI", 24, false, 80, "default"]
},
{
"id": 37,
"type": "UNETLoader",
"pos": [420, 40],
"size": [315, 82],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [79],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["mochi_preview_bf16.safetensors", "default"]
},
{
"id": 38,
"type": "CLIPLoader",
"pos": [40, 270],
"size": [315, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [74, 75],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPLoader"
},
"widgets_values": ["t5xxl_fp16.safetensors", "mochi", "default"]
},
{
"id": 39,
"type": "VAELoader",
"pos": [890, 500],
"size": [278.68, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [76]
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["mochi_vae.safetensors"]
},
{
"id": 40,
"type": "MarkdownNote",
"pos": [45, 405],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/mochi/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[35, 3, 0, 8, 0, "LATENT"],
[38, 21, 0, 3, 3, "LATENT"],
[46, 6, 0, 3, 1, "CONDITIONING"],
[52, 7, 0, 3, 2, "CONDITIONING"],
[56, 8, 0, 28, 0, "IMAGE"],
[74, 38, 0, 6, 0, "CLIP"],
[75, 38, 0, 7, 0, "CLIP"],
[76, 39, 0, 8, 1, "VAE"],
[79, 37, 0, 3, 0, "MODEL"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [35.42, 115.48]
}
},
"version": 0.4,
"models": [
{
"name": "mochi_vae.safetensors",
"url": "https://huggingface.co/Comfy-Org/mochi_preview_repackaged/resolve/main/split_files/vae/mochi_vae.safetensors?download=true",
"directory": "vae"
},
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "mochi_preview_bf16.safetensors",
"url": "https://huggingface.co/Comfy-Org/mochi_preview_repackaged/resolve/main/split_files/diffusion_models/mochi_preview_bf16.safetensors?download=true",
"directory": "diffusion_models"
}
]
}

Binary file not shown.

Binary file not shown.

View File

@@ -1,582 +0,0 @@
{
"last_node_id": 57,
"last_link_id": 113,
"nodes": [
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [
1,
-17
],
"size": [
389.7508239746094,
98
],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
14
],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [
65
],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [
8,
85
],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": [
"sd3.5_large_fp8_scaled.safetensors"
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [
0,
145
],
"size": [
388.7348327636719,
188.959716796875
],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 65
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
98,
109
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"crystal butterfly above the sea, white, hyper detailed, with diamonds"
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [
770,
310
],
"size": [
278.8823547363281,
46.5799446105957
],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 63
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
13
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [
1097,
-14
],
"size": [
845.74560546875,
898.2359619140625
],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 13
}
],
"outputs": [],
"properties": {},
"widgets_values": [
"ComfyUI"
]
},
{
"id": 33,
"type": "EmptySD3LatentImage",
"pos": [
420,
250
],
"size": [
300.9447021484375,
106
],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [
66
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [
1024,
1024,
1
]
},
{
"id": 50,
"type": "ConditioningZeroOut",
"pos": [
94,
404
],
"size": [
317.4000244140625,
26
],
"flags": {
"collapsed": true
},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 98
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [
108
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningZeroOut"
},
"widgets_values": []
},
{
"id": 46,
"type": "ControlNetLoader",
"pos": [
-15,
472
],
"size": [
411.968017578125,
58.06914520263672
],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"shape": 3,
"links": [
87
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": [
"sd3.5_large_controlnet_blur.safetensors"
]
},
{
"id": 57,
"type": "LoadImage",
"pos": [
449,
478
],
"size": [
470.65765380859375,
461.4942932128906
],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
113
],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": [
"ComfyUI_00204_.png",
"image"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [
770,
-10
],
"size": [
284.1198425292969,
262
],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 14
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 83,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 84
},
{
"name": "latent_image",
"type": "LATENT",
"link": 66
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
63
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
268264726798396,
"randomize",
30,
4,
"euler",
"simple",
1
]
},
{
"id": 44,
"type": "ControlNetApplySD3",
"pos": [
420,
-20
],
"size": [
315,
186
],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 109
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 108
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 87,
"slot_index": 2
},
{
"name": "vae",
"type": "VAE",
"link": 85,
"slot_index": 3
},
{
"name": "image",
"type": "IMAGE",
"link": 113,
"slot_index": 4
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"shape": 3,
"links": [
83
],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"shape": 3,
"links": [
84
],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "ControlNetApplySD3"
},
"widgets_values": [
0.7000000000000001,
0,
1
]
}
],
"links": [
[
8,
4,
2,
8,
1,
"VAE"
],
[
13,
8,
0,
9,
0,
"IMAGE"
],
[
14,
4,
0,
3,
0,
"MODEL"
],
[
63,
3,
0,
8,
0,
"LATENT"
],
[
65,
4,
1,
6,
0,
"CLIP"
],
[
66,
33,
0,
3,
3,
"LATENT"
],
[
83,
44,
0,
3,
1,
"CONDITIONING"
],
[
84,
44,
1,
3,
2,
"CONDITIONING"
],
[
85,
4,
2,
44,
3,
"VAE"
],
[
87,
46,
0,
44,
2,
"CONTROL_NET"
],
[
92,
48,
0,
47,
0,
"IMAGE"
],
[
98,
6,
0,
50,
0,
"CONDITIONING"
],
[
102,
47,
0,
53,
0,
"IMAGE"
],
[
108,
50,
0,
44,
1,
"CONDITIONING"
],
[
109,
6,
0,
44,
0,
"CONDITIONING"
],
[
113,
57,
0,
44,
4,
"IMAGE"
]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.555991731349224,
"offset": [
224.63848966184364,
330.6857814538206
]
}
},
"version": 0.4
}

Binary file not shown.

Binary file not shown.

View File

@@ -1,470 +0,0 @@
{
"last_node_id": 52,
"last_link_id": 105,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1152, 48],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 63
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 33,
"type": "EmptySD3LatentImage",
"pos": [576, 336],
"size": [210, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [66],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 48,
"type": "ImageScale",
"pos": [-320, 448],
"size": [315, 130],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 91
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [92],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageScale"
},
"widgets_values": ["bilinear", 1024, 1024, "center"]
},
{
"id": 49,
"type": "PreviewImage",
"pos": [384, 512],
"size": [443.1, 520.83],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 93
}
],
"outputs": [],
"properties": {
"Node name for S&R": "PreviewImage"
},
"widgets_values": []
},
{
"id": 50,
"type": "ConditioningZeroOut",
"pos": [203, 133],
"size": [317.4, 26],
"flags": {
"collapsed": true
},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 98
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [102],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningZeroOut"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [816, 48],
"size": [284.12, 262],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 14
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 103,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 104
},
{
"name": "latent_image",
"type": "LATENT",
"link": 66
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [63],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
790192293768778,
"randomize",
32,
4.5,
"euler",
"simple",
1
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1392, 48],
"size": [882.45, 927.85],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 13
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 45,
"type": "LoadImage",
"pos": [-666, 447],
"size": [288, 336],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [91]
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sd3_controlnet_example.png", "image"]
},
{
"id": 47,
"type": "Canny",
"pos": [20, 449],
"size": [315, 82],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 92
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [93, 99],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "Canny"
},
"widgets_values": [0.4, 0.8]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [0, -128],
"size": [320, 192],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 65
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [98, 101],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"happy cute anime fox girl with massive fluffy fennec ears and blonde fluffy hair long hair blue eyes wearing a red scarf a pink sweater and blue jeans\n\nstanding in a beautiful forest with mountains\n\n"
]
},
{
"id": 51,
"type": "ControlNetApplyAdvanced",
"pos": [470, 60],
"size": [315, 186],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 101
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 102
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 100
},
{
"name": "image",
"type": "IMAGE",
"link": 99
},
{
"name": "vae",
"type": "VAE",
"shape": 7,
"link": 105
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [103],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [104],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "ControlNetApplyAdvanced"
},
"widgets_values": [0.66, 0, 1]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-576, 64],
"size": [499.99, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [14],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [65],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8, 105],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd3.5_large_fp8_scaled.safetensors"]
},
{
"id": 46,
"type": "ControlNetLoader",
"pos": [-128, 320],
"size": [460.34, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"shape": 3,
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["sd3.5_large_controlnet_canny.safetensors"]
},
{
"id": 52,
"type": "MarkdownNote",
"pos": [-570, 210],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sd3/#sd35-controlnets)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[8, 4, 2, 8, 1, "VAE"],
[13, 8, 0, 9, 0, "IMAGE"],
[14, 4, 0, 3, 0, "MODEL"],
[63, 3, 0, 8, 0, "LATENT"],
[65, 4, 1, 6, 0, "CLIP"],
[66, 33, 0, 3, 3, "LATENT"],
[91, 45, 0, 48, 0, "IMAGE"],
[92, 48, 0, 47, 0, "IMAGE"],
[93, 47, 0, 49, 0, "IMAGE"],
[98, 6, 0, 50, 0, "CONDITIONING"],
[99, 47, 0, 51, 3, "IMAGE"],
[100, 46, 0, 51, 2, "CONTROL_NET"],
[101, 6, 0, 51, 0, "CONDITIONING"],
[102, 50, 0, 51, 1, "CONDITIONING"],
[103, 51, 0, 3, 1, "CONDITIONING"],
[104, 51, 1, 3, 2, "CONDITIONING"],
[105, 4, 2, 51, 4, "VAE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.91,
"offset": [686.52, 188.52]
}
},
"version": 0.4,
"models": [
{
"name": "sd3.5_large_controlnet_canny.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets/resolve/main/sd3.5_large_controlnet_canny.safetensors?download=true",
"directory": "controlnet"
},
{
"name": "sd3.5_large_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-3.5-fp8/resolve/main/sd3.5_large_fp8_scaled.safetensors?download=true",
"directory": "checkpoints"
}
]
}

Binary file not shown.

Binary file not shown.

View File

@@ -1,713 +0,0 @@
{
"last_node_id": 60,
"last_link_id": 121,
"nodes": [
{
"id": 9,
"type": "SaveImage",
"pos": [
1097,
-14
],
"size": [
845.74560546875,
898.2359619140625
],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 13
}
],
"outputs": [],
"properties": {},
"widgets_values": [
"ComfyUI"
]
},
{
"id": 33,
"type": "EmptySD3LatentImage",
"pos": [
430,
250
],
"size": [
300.9447021484375,
106
],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [
115
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [
1024,
1024,
1
]
},
{
"id": 45,
"type": "LoadImage",
"pos": [
-10,
600
],
"size": [
288,
336
],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [
91
]
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": [
"vcmd_create_dragon_mascot_characters_that_best_suitable_for_Sin_5ba6beab-2ad7-4810-997e-387c27bea297.png",
"image"
]
},
{
"id": 46,
"type": "ControlNetLoader",
"pos": [
0,
490
],
"size": [
623.134033203125,
58
],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"shape": 3,
"links": [
87
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": [
"sd3.5_large_controlnet_depth.safetensors"
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [
780,
315
],
"size": [
278.8823547363281,
46.5799446105957
],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 116
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
13
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 56,
"type": "KSampler",
"pos": [
765,
-15
],
"size": [
315,
262
],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 112
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 113
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 114
},
{
"name": "latent_image",
"type": "LATENT",
"link": 115
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
116
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
0,
"randomize",
20,
8,
"euler",
"normal",
1
]
},
{
"id": 58,
"type": "ConditioningZeroOut",
"pos": [
135,
420
],
"size": [
317.4000244140625,
26
],
"flags": {
"collapsed": true
},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 119
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
121
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningZeroOut"
}
},
{
"id": 57,
"type": "CLIPTextEncode",
"pos": [
-15,
150
],
"size": [
400,
200
],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 117
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
119,
120
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"hairy dragon stuffed toy with light green color in a fairy tale background, fluffy hair, standing with 2 legs"
]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [
-15,
0
],
"size": [
387.85345458984375,
98
],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
112
],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [
117
],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [
8,
85
],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": [
"sd3.5_large_fp8_scaled.safetensors"
]
},
{
"id": 44,
"type": "ControlNetApplySD3",
"pos": [
420,
15
],
"size": [
315,
186
],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 120
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 121
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 87,
"slot_index": 2
},
{
"name": "vae",
"type": "VAE",
"link": 85,
"slot_index": 3
},
{
"name": "image",
"type": "IMAGE",
"link": 110,
"slot_index": 4
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"shape": 3,
"links": [
113
],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"shape": 3,
"links": [
114
],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "ControlNetApplySD3"
},
"widgets_values": [
0.7000000000000001,
0,
1
]
},
{
"id": 54,
"type": "PreviewImage",
"pos": [
660,
495
],
"size": [
366.44989013671875,
340.7085266113281
],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 109
}
],
"outputs": [],
"properties": {
"Node name for S&R": "PreviewImage"
},
"widgets_values": []
},
{
"id": 48,
"type": "ImageScale",
"pos": [
310,
600
],
"size": [
315,
130
],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 91
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [
103,
108
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageScale"
},
"widgets_values": [
"bilinear",
1024,
1024,
"center"
]
},
{
"id": 55,
"type": "DepthAnythingPreprocessor",
"pos": [
310,
770
],
"size": [
315,
82
],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 108
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
109,
110
]
}
],
"properties": {
"Node name for S&R": "DepthAnythingPreprocessor"
},
"widgets_values": [
"depth_anything_vitl14.pth",
1024
]
}
],
"links": [
[
8,
4,
2,
8,
1,
"VAE"
],
[
13,
8,
0,
9,
0,
"IMAGE"
],
[
85,
4,
2,
44,
3,
"VAE"
],
[
87,
46,
0,
44,
2,
"CONTROL_NET"
],
[
91,
45,
0,
48,
0,
"IMAGE"
],
[
92,
48,
0,
47,
0,
"IMAGE"
],
[
102,
47,
0,
53,
0,
"IMAGE"
],
[
108,
48,
0,
55,
0,
"IMAGE"
],
[
109,
55,
0,
54,
0,
"IMAGE"
],
[
110,
55,
0,
44,
4,
"IMAGE"
],
[
112,
4,
0,
56,
0,
"MODEL"
],
[
113,
44,
0,
56,
1,
"CONDITIONING"
],
[
114,
44,
1,
56,
2,
"CONDITIONING"
],
[
115,
33,
0,
56,
3,
"LATENT"
],
[
116,
56,
0,
8,
0,
"LATENT"
],
[
117,
4,
1,
57,
0,
"CLIP"
],
[
119,
57,
0,
58,
0,
"CONDITIONING"
],
[
120,
57,
0,
44,
0,
"CONDITIONING"
],
[
121,
58,
0,
44,
1,
"CONDITIONING"
]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.2331581182307068,
"offset": [
692.0972183417064,
84.29928193157562
]
}
},
"version": 0.4
}

Binary file not shown.

View File

@@ -1,278 +0,0 @@
{
"last_node_id": 54,
"last_link_id": 102,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1200, 96],
"size": [210, 46],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 53,
"slot_index": 1
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1440, 96],
"size": [952.51, 1007.93],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 51,
"slot_index": 0
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 40,
"type": "CLIPTextEncode",
"pos": [384, 336],
"size": [432, 192],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 102
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [80],
"slot_index": 0
}
],
"title": "Negative Prompt",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 53,
"type": "EmptySD3LatentImage",
"pos": [480, 576],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-48, 96],
"size": [384.76, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [99],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [101, 102],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [53],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd3.5_large_fp8_scaled.safetensors"]
},
{
"id": 16,
"type": "CLIPTextEncode",
"pos": [384, 96],
"size": [432, 192],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 101
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [21],
"slot_index": 0
}
],
"title": "Positive Prompt",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"a bottle with a pink and red galaxy inside it on top of a wooden table on a table in the middle of a modern kitchen with a window to the outdoors mountain range bright sun clouds forest"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 3,
"type": "KSampler",
"pos": [864, 96],
"size": [315, 262],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 99,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 21
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 80
},
{
"name": "latent_image",
"type": "LATENT",
"link": 100
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
585483408983215,
"randomize",
20,
4.01,
"euler",
"sgm_uniform",
1
]
},
{
"id": 54,
"type": "MarkdownNote",
"pos": [-45, 240],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sd3/#sd35)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[21, 16, 0, 3, 1, "CONDITIONING"],
[51, 8, 0, 9, 0, "IMAGE"],
[53, 4, 2, 8, 1, "VAE"],
[80, 40, 0, 3, 2, "CONDITIONING"],
[99, 4, 0, 3, 0, "MODEL"],
[100, 53, 0, 3, 3, "LATENT"],
[101, 4, 1, 16, 0, "CLIP"],
[102, 4, 1, 40, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.14,
"offset": [93.35, -1.71]
}
},
"version": 0.4,
"models": [
{
"name": "sd3.5_large_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-3.5-fp8/resolve/main/sd3.5_large_fp8_scaled.safetensors?download=true",
"directory": "checkpoints"
}
]
}

Binary file not shown.

View File

@@ -1,728 +0,0 @@
{
"last_node_id": 49,
"last_link_id": 44,
"nodes": [
{
"id": 36,
"type": "Note",
"pos": [-74, -470],
"size": [315.7, 147.96],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Load Checkpoint BASE",
"properties": {
"text": ""
},
"widgets_values": [
"This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 37,
"type": "Note",
"pos": [610, -460],
"size": [330, 140],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Load Checkpoint REFINER",
"properties": {
"text": ""
},
"widgets_values": [
"This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations."
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 40,
"type": "Note",
"pos": [1325, 234],
"size": [451.5, 424.42],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - KSampler ADVANCED General Information",
"properties": {
"text": ""
},
"widgets_values": [
"Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)."
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [544.5, 651.12],
"size": [300, 110],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [27],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 1024, 1],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 17,
"type": "VAEDecode",
"pos": [2220.77, 129.6],
"size": [200, 50],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 25
},
{
"name": "vae",
"type": "VAE",
"link": 34
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [28],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": [],
"color": "#332922",
"bgcolor": "#593930"
},
{
"id": 41,
"type": "Note",
"pos": [2160.77, 229.6],
"size": [320, 120],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - VAE Decoder",
"properties": {
"text": ""
},
"widgets_values": [
"This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG."
],
"color": "#332922",
"bgcolor": "#593930"
},
{
"id": 42,
"type": "Note",
"pos": [564.5, 801.12],
"size": [260, 210],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Empty Latent Image",
"properties": {
"text": ""
},
"widgets_values": [
"This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 11,
"type": "KSamplerAdvanced",
"pos": [1800, 130],
"size": [300, 340],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 14,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 23
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 24
},
{
"name": "latent_image",
"type": "LATENT",
"link": 13
},
{
"name": "steps",
"type": "INT",
"widget": {
"name": "steps"
},
"link": 38,
"slot_index": 4
},
{
"name": "start_at_step",
"type": "INT",
"widget": {
"name": "start_at_step"
},
"link": 44
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [25],
"slot_index": 0
}
],
"title": "KSampler (Advanced) - REFINER",
"properties": {
"Node name for S&R": "KSamplerAdvanced"
},
"widgets_values": [
"disable",
0,
"fixed",
25,
8,
"euler",
"normal",
20,
10000,
"disable"
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 12,
"type": "CheckpointLoaderSimple",
"pos": [600, -611],
"size": [350, 100],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [14],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [19, 20],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [34],
"slot_index": 2
}
],
"title": "Load Checkpoint - REFINER",
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_refiner_1.0.safetensors"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-90, -620],
"size": [350, 100],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [10],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [],
"slot_index": 2
}
],
"title": "Load Checkpoint - BASE",
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 47,
"type": "PrimitiveNode",
"pos": [1037.53, 881.61],
"size": [210, 82],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "end_at_step"
},
"links": [43, 44],
"slot_index": 0
}
],
"title": "end_at_step",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [20, "fixed"],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 45,
"type": "PrimitiveNode",
"pos": [1039.53, 734.61],
"size": [210, 82],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "steps"
},
"links": [38, 41]
}
],
"title": "steps",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [25, "fixed"],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 48,
"type": "Note",
"pos": [1036, 1018],
"size": [213.91, 110.17],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"These can be used to control the total sampling steps and the step at which the sampling switches to the refiner."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 10,
"type": "KSamplerAdvanced",
"pos": [1000, 230],
"size": [300, 334],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 10
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 11
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 12
},
{
"name": "latent_image",
"type": "LATENT",
"link": 27
},
{
"name": "steps",
"type": "INT",
"widget": {
"name": "steps"
},
"link": 41,
"slot_index": 4
},
{
"name": "end_at_step",
"type": "INT",
"widget": {
"name": "end_at_step"
},
"link": 43,
"slot_index": 5
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [13],
"slot_index": 0
}
],
"title": "KSampler (Advanced) - BASE",
"properties": {
"Node name for S&R": "KSamplerAdvanced"
},
"widgets_values": [
"enable",
6767725640732,
"randomize",
25,
8,
"euler",
"normal",
0,
20,
"enable"
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 16,
"type": "CLIPTextEncode",
"pos": [1110, -90],
"size": [340, 140],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 15,
"type": "CLIPTextEncode",
"pos": [1110, -270],
"size": [340, 140],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 19
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"daytime scenery sky nature dark blue bottle with a galaxy stars milky way in it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [610, 30],
"size": [320, 160],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [11],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["daytime sky nature dark blue galaxy bottle"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [610, 240],
"size": [320, 150],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 19,
"type": "SaveImage",
"pos": [2600, 130],
"size": [735.55, 823.98],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 28
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"],
"color": "#222",
"bgcolor": "#000"
},
{
"id": 49,
"type": "MarkdownNote",
"pos": [-90, -255],
"size": [225, 60],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdxl/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[3, 4, 1, 6, 0, "CLIP"],
[5, 4, 1, 7, 0, "CLIP"],
[10, 4, 0, 10, 0, "MODEL"],
[11, 6, 0, 10, 1, "CONDITIONING"],
[12, 7, 0, 10, 2, "CONDITIONING"],
[13, 10, 0, 11, 3, "LATENT"],
[14, 12, 0, 11, 0, "MODEL"],
[19, 12, 1, 15, 0, "CLIP"],
[20, 12, 1, 16, 0, "CLIP"],
[23, 15, 0, 11, 1, "CONDITIONING"],
[24, 16, 0, 11, 2, "CONDITIONING"],
[25, 11, 0, 17, 0, "LATENT"],
[27, 5, 0, 10, 3, "LATENT"],
[28, 17, 0, 19, 0, "IMAGE"],
[34, 12, 2, 17, 1, "VAE"],
[38, 45, 0, 11, 4, "INT"],
[41, 45, 0, 10, 4, "INT"],
[43, 47, 0, 10, 5, "INT"],
[44, 47, 0, 11, 5, "INT"]
],
"groups": [
{
"id": 1,
"title": "Base Prompt",
"bounding": [585, -60, 366, 463],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Refiner Prompt",
"bounding": [1095, -360, 376, 429],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Load in BASE SDXL Model",
"bounding": [-105, -705, 369, 399],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Load in REFINER SDXL Model",
"bounding": [585, -705, 391, 400],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 5,
"title": "Empty Latent Image",
"bounding": [525, 570, 339, 443],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 6,
"title": "VAE Decoder",
"bounding": [2145, 45, 360, 350],
"color": "#b06634",
"font_size": 24,
"flags": {}
},
{
"id": 7,
"title": "Step Control",
"bounding": [1005, 630, 284, 524],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.63,
"offset": [1264.03, 812.09]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "sd_xl_refiner_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-refiner-1.0/resolve/main/sd_xl_refiner_1.0.safetensors?download=true",
"directory": "checkpoints"
}
]
}

Binary file not shown.

View File

@@ -1,490 +0,0 @@
{
"last_node_id": 41,
"last_link_id": 106,
"nodes": [
{
"id": 13,
"type": "CLIPVisionEncode",
"pos": [135, -63],
"size": [253.6, 78],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 101
},
{
"name": "image",
"type": "IMAGE",
"link": 95
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 36,
"type": "CLIPVisionEncode",
"pos": [137, 24],
"size": [253.6, 78],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 102
},
{
"name": "image",
"type": "IMAGE",
"link": 98
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1277, -210],
"size": [210, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 106
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [-130, -295],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 104
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["anime"]
},
{
"id": 34,
"type": "LoadImage",
"pos": [-352, -29],
"size": [435.35, 377.59],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [95],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["mountains.png", "image"]
},
{
"id": 38,
"type": "LoadImage",
"pos": [-341, 412],
"size": [435.35, 377.59],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [98],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sunset.png", "image"]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [425, -18],
"size": [425.28, 180.61],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 105
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"]
},
{
"id": 40,
"type": "CheckpointLoaderSimple",
"pos": [-761, -275],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [103],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [104, 105],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [106],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"]
},
{
"id": 39,
"type": "CLIPVisionLoader",
"pos": [-760, -120],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [101, 102],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionLoader"
},
"widgets_values": ["clip_vision_g.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1542, -209],
"size": [635.19, 692.82],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [915, -218],
"size": [315, 262],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 103
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 97
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
133632471276133,
"randomize",
26,
8,
"dpmpp_3m_sde_gpu",
"exponential",
1
]
},
{
"id": 19,
"type": "unCLIPConditioning",
"pos": [347, -207],
"size": [262, 102],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 23
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 24
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [96],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "unCLIPConditioning"
},
"widgets_values": [0.75, 0]
},
{
"id": 37,
"type": "unCLIPConditioning",
"pos": [626, -205],
"size": [262, 102],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 96
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 100
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [97],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "unCLIPConditioning"
},
"widgets_values": [0.75, 0]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [534, 214],
"size": [315, 106],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 41,
"type": "MarkdownNote",
"pos": [-750, -15],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdxl/#revision)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[23, 6, 0, 19, 0, "CONDITIONING"],
[24, 13, 0, 19, 1, "CLIP_VISION_OUTPUT"],
[95, 34, 0, 13, 1, "IMAGE"],
[96, 19, 0, 37, 0, "CONDITIONING"],
[97, 37, 0, 3, 1, "CONDITIONING"],
[98, 38, 0, 36, 1, "IMAGE"],
[100, 36, 0, 37, 1, "CLIP_VISION_OUTPUT"],
[101, 39, 0, 13, 0, "CLIP_VISION"],
[102, 39, 0, 36, 0, "CLIP_VISION"],
[103, 40, 0, 3, 0, "MODEL"],
[104, 40, 1, 6, 0, "CLIP"],
[105, 40, 1, 7, 0, "CLIP"],
[106, 40, 2, 8, 1, "VAE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [962.72, 417.65]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "clip_vision_g.safetensors",
"url": "https://huggingface.co/comfyanonymous/clip_vision_g/resolve/main/clip_vision_g.safetensors?download=true",
"directory": "clip_vision"
}
]
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 274 KiB

View File

@@ -1,494 +0,0 @@
{
"last_node_id": 43,
"last_link_id": 111,
"nodes": [
{
"id": 13,
"type": "CLIPVisionEncode",
"pos": [135, -63],
"size": [253.6, 78],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 101
},
{
"name": "image",
"type": "IMAGE",
"link": 95
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 36,
"type": "CLIPVisionEncode",
"pos": [137, 24],
"size": [253.6, 78],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 102
},
{
"name": "image",
"type": "IMAGE",
"link": 98
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1277, -210],
"size": [210, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 106
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 34,
"type": "LoadImage",
"pos": [-352, -29],
"size": [435.35, 377.59],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [95],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["mountains.png", "image"]
},
{
"id": 38,
"type": "LoadImage",
"pos": [-341, 412],
"size": [435.35, 377.59],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [98],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sunset.png", "image"]
},
{
"id": 40,
"type": "CheckpointLoaderSimple",
"pos": [-761, -275],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [103],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [104],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [106],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"]
},
{
"id": 39,
"type": "CLIPVisionLoader",
"pos": [-760, -120],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [101, 102],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionLoader"
},
"widgets_values": ["clip_vision_g.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1542, -209],
"size": [635.19, 692.82],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [915, -218],
"size": [315, 262],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 103
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 97
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 111
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
133632471276133,
"randomize",
26,
8,
"dpmpp_3m_sde_gpu",
"exponential",
1
]
},
{
"id": 37,
"type": "unCLIPConditioning",
"pos": [626, -205],
"size": [262, 102],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 96
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 100
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [97],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "unCLIPConditioning"
},
"widgets_values": [0.75, 0]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [534, 214],
"size": [315, 106],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 19,
"type": "unCLIPConditioning",
"pos": [347, -207],
"size": [262, 102],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 110
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 24
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [96],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "unCLIPConditioning"
},
"widgets_values": [0.75, 0]
},
{
"id": 42,
"type": "ConditioningZeroOut",
"pos": [60, -211],
"size": [211.6, 26],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 109,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [110],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningZeroOut"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [-182, -184],
"size": [422.85, 164.31],
"flags": {
"collapsed": true
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 104
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [109, 111],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""]
},
{
"id": 43,
"type": "MarkdownNote",
"pos": [-750, -15],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdxl/#revision)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[24, 13, 0, 19, 1, "CLIP_VISION_OUTPUT"],
[95, 34, 0, 13, 1, "IMAGE"],
[96, 19, 0, 37, 0, "CONDITIONING"],
[97, 37, 0, 3, 1, "CONDITIONING"],
[98, 38, 0, 36, 1, "IMAGE"],
[100, 36, 0, 37, 1, "CLIP_VISION_OUTPUT"],
[101, 39, 0, 13, 0, "CLIP_VISION"],
[102, 39, 0, 36, 0, "CLIP_VISION"],
[103, 40, 0, 3, 0, "MODEL"],
[104, 40, 1, 6, 0, "CLIP"],
[106, 40, 2, 8, 1, "VAE"],
[109, 6, 0, 42, 0, "CONDITIONING"],
[110, 42, 0, 19, 0, "CONDITIONING"],
[111, 6, 0, 3, 2, "CONDITIONING"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.49,
"offset": [1046.06, 311.39]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "clip_vision_g.safetensors",
"url": "https://huggingface.co/comfyanonymous/clip_vision_g/resolve/main/clip_vision_g.safetensors?download=true",
"directory": "clip_vision"
}
]
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 186 KiB

View File

@@ -1,896 +0,0 @@
{
"last_node_id": 49,
"last_link_id": 44,
"nodes": [
{
"id": 15,
"type": "CLIPTextEncode",
"pos": [1139.11, -121.79],
"size": [210, 54],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 19
},
{
"name": "text",
"type": "STRING",
"widget": {
"name": "text"
},
"link": 21,
"slot_index": 1
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"evening sunset scenery blue sky nature, glass bottle with a galaxy in it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 16,
"type": "CLIPTextEncode",
"pos": [1139.11, -31.79],
"size": [210, 54],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
},
{
"name": "text",
"type": "STRING",
"widget": {
"name": "text"
},
"link": 22,
"slot_index": 1
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 14,
"type": "PrimitiveNode",
"pos": [117.74, 335.18],
"size": [300, 160],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "STRING",
"type": "STRING",
"widget": {
"name": "text"
},
"links": [18, 22],
"slot_index": 0
}
],
"title": "Negative Prompt (Text)",
"properties": {
"Run widget replace on values": false
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 13,
"type": "PrimitiveNode",
"pos": [117.74, 135.18],
"size": [300, 160],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "STRING",
"type": "STRING",
"widget": {
"name": "text"
},
"links": [16, 21],
"slot_index": 0
}
],
"title": "Positive Prompt (Text)",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [
"evening sunset scenery blue sky nature, glass bottle with a galaxy in it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 36,
"type": "Note",
"pos": [-74, -470],
"size": [315.7, 147.96],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Load Checkpoint BASE",
"properties": {
"text": ""
},
"widgets_values": [
"This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 37,
"type": "Note",
"pos": [610, -460],
"size": [330, 140],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Load Checkpoint REFINER",
"properties": {
"text": ""
},
"widgets_values": [
"This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations."
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 38,
"type": "Note",
"pos": [126.74, 534.18],
"size": [284.33, 123.89],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Text Prompts",
"properties": {
"text": ""
},
"widgets_values": [
"These nodes are where you include the text for:\n - what you want in the picture (Positive Prompt, Green)\n - or what you don't want in the picture (Negative Prompt, Red)\n\nThis node type is called a \"PrimitiveNode\" if you are searching for the node type."
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 40,
"type": "Note",
"pos": [1325, 234],
"size": [451.5, 424.42],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - KSampler ADVANCED General Information",
"properties": {
"text": ""
},
"widgets_values": [
"Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all it's leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out it's process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop it's process of de-noising the picture. If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)."
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [544.5, 651.12],
"size": [300, 110],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [27],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 1024, 1],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 17,
"type": "VAEDecode",
"pos": [2220.77, 129.6],
"size": [200, 50],
"flags": {},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 25
},
{
"name": "vae",
"type": "VAE",
"link": 34
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [28],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": [],
"color": "#332922",
"bgcolor": "#593930"
},
{
"id": 41,
"type": "Note",
"pos": [2160.77, 229.6],
"size": [320, 120],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - VAE Decoder",
"properties": {
"text": ""
},
"widgets_values": [
"This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG."
],
"color": "#332922",
"bgcolor": "#593930"
},
{
"id": 42,
"type": "Note",
"pos": [564.5, 801.12],
"size": [260, 210],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Empty Latent Image",
"properties": {
"text": ""
},
"widgets_values": [
"This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 43,
"type": "Note",
"pos": [1125, 70],
"size": [240, 80],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - CLIP Encode (REFINER)",
"properties": {
"text": ""
},
"widgets_values": [
"These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Refiner)"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [599.5, 269.48],
"size": [210, 54],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
},
{
"name": "text",
"type": "STRING",
"widget": {
"name": "text"
},
"link": 16,
"slot_index": 1
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [11],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"evening sunset scenery blue sky nature, glass bottle with a galaxy in it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [599.5, 359.48],
"size": [210, 54],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
},
{
"name": "text",
"type": "STRING",
"widget": {
"name": "text"
},
"link": 18,
"slot_index": 1
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 39,
"type": "Note",
"pos": [599.5, 449.48],
"size": [210, 80],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - CLIP Encode (BASE)",
"properties": {
"text": ""
},
"widgets_values": [
"These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Base)"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 11,
"type": "KSamplerAdvanced",
"pos": [1800, 130],
"size": [300, 340],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 14,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 23
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 24
},
{
"name": "latent_image",
"type": "LATENT",
"link": 13
},
{
"name": "steps",
"type": "INT",
"widget": {
"name": "steps"
},
"link": 38,
"slot_index": 4
},
{
"name": "start_at_step",
"type": "INT",
"widget": {
"name": "start_at_step"
},
"link": 44
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [25],
"slot_index": 0
}
],
"title": "KSampler (Advanced) - REFINER",
"properties": {
"Node name for S&R": "KSamplerAdvanced"
},
"widgets_values": [
"disable",
0,
"fixed",
25,
8,
"euler",
"normal",
20,
10000,
"disable"
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 12,
"type": "CheckpointLoaderSimple",
"pos": [600, -611],
"size": [350, 100],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [14],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [19, 20],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [34],
"slot_index": 2
}
],
"title": "Load Checkpoint - REFINER",
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_refiner_1.0.safetensors"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-90, -620],
"size": [350, 100],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [10],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [],
"slot_index": 2
}
],
"title": "Load Checkpoint - BASE",
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 19,
"type": "SaveImage",
"pos": [2600, 130],
"size": [565.77, 596.38],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 28
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"],
"color": "#222",
"bgcolor": "#000"
},
{
"id": 47,
"type": "PrimitiveNode",
"pos": [1037.53, 881.61],
"size": [210, 82],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "end_at_step"
},
"links": [43, 44],
"slot_index": 0
}
],
"title": "end_at_step",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [20, "fixed"],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 45,
"type": "PrimitiveNode",
"pos": [1039.53, 734.61],
"size": [210, 82],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "steps"
},
"links": [38, 41]
}
],
"title": "steps",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [25, "fixed"],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 48,
"type": "Note",
"pos": [1036, 1018],
"size": [213.91, 110.17],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"These can be used to control the total sampling steps and the step at which the sampling switches to the refiner."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 10,
"type": "KSamplerAdvanced",
"pos": [1000, 230],
"size": [300, 334],
"flags": {},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 10
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 11
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 12
},
{
"name": "latent_image",
"type": "LATENT",
"link": 27
},
{
"name": "steps",
"type": "INT",
"widget": {
"name": "steps"
},
"link": 41,
"slot_index": 4
},
{
"name": "end_at_step",
"type": "INT",
"widget": {
"name": "end_at_step"
},
"link": 43,
"slot_index": 5
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [13],
"slot_index": 0
}
],
"title": "KSampler (Advanced) - BASE",
"properties": {
"Node name for S&R": "KSamplerAdvanced"
},
"widgets_values": [
"enable",
721897303308196,
"randomize",
25,
8,
"euler",
"normal",
0,
20,
"enable"
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 49,
"type": "MarkdownNote",
"pos": [-105, -255],
"size": [225, 60],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdxl/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[3, 4, 1, 6, 0, "CLIP"],
[5, 4, 1, 7, 0, "CLIP"],
[10, 4, 0, 10, 0, "MODEL"],
[11, 6, 0, 10, 1, "CONDITIONING"],
[12, 7, 0, 10, 2, "CONDITIONING"],
[13, 10, 0, 11, 3, "LATENT"],
[14, 12, 0, 11, 0, "MODEL"],
[16, 13, 0, 6, 1, "STRING"],
[18, 14, 0, 7, 1, "STRING"],
[19, 12, 1, 15, 0, "CLIP"],
[20, 12, 1, 16, 0, "CLIP"],
[21, 13, 0, 15, 1, "STRING"],
[22, 14, 0, 16, 1, "STRING"],
[23, 15, 0, 11, 1, "CONDITIONING"],
[24, 16, 0, 11, 2, "CONDITIONING"],
[25, 11, 0, 17, 0, "LATENT"],
[27, 5, 0, 10, 3, "LATENT"],
[28, 17, 0, 19, 0, "IMAGE"],
[34, 12, 2, 17, 1, "VAE"],
[38, 45, 0, 11, 4, "INT"],
[41, 45, 0, 10, 4, "INT"],
[43, 47, 0, 10, 5, "INT"],
[44, 47, 0, 11, 5, "INT"]
],
"groups": [
{
"id": 1,
"title": "Base Prompt",
"bounding": [585, 195, 252, 361],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Refiner Prompt",
"bounding": [1095, -195, 282, 372],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Text Prompts",
"bounding": [105, 45, 339, 622],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Load in BASE SDXL Model",
"bounding": [-105, -705, 369, 399],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 5,
"title": "Load in REFINER SDXL Model",
"bounding": [585, -705, 391, 400],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 6,
"title": "Empty Latent Image",
"bounding": [525, 570, 339, 443],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 7,
"title": "VAE Decoder",
"bounding": [2145, 45, 360, 350],
"color": "#b06634",
"font_size": 24,
"flags": {}
},
{
"id": 8,
"title": "Step Control",
"bounding": [1005, 630, 284, 524],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.78,
"offset": [685.2, 1020.68]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "sd_xl_refiner_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-refiner-1.0/resolve/main/sd_xl_refiner_1.0.safetensors?download=true",
"directory": "checkpoints"
}
]
}

Some files were not shown because too many files have changed in this diff.