Add ComfyUI Examples workflows to in-app templates (#2541)

Co-authored-by: jojodecayz <121620462+jojodecayz@users.noreply.github.com>
Co-authored-by: github-actions <github-actions@github.com>
This commit is contained in:
bymyself
2025-02-15 14:15:56 -07:00
committed by GitHub
parent 00dceb880a
commit 77d3e0c45e
54 changed files with 21677 additions and 1160 deletions

View File

@@ -0,0 +1,688 @@
{
"last_node_id": 40,
"last_link_id": 38,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 14
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "ControlNetApply",
"pos": [340, 267],
"size": [317.4, 98],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 10
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 11
},
{
"name": "image",
"type": "IMAGE",
"link": 12
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [1]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1280, 704, 1]
},
{
"id": 17,
"type": "VAEDecode",
"pos": [2527, 369],
"size": [210, 46],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 18
},
{
"name": "vae",
"type": "VAE",
"link": 20
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [19],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [-8, 607],
"size": [425.28, 180.61],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 35
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 38
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 13
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 16],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1071823866653712,
"randomize",
10,
8,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 35,
"type": "CLIPTextEncode",
"pos": [1310, -72],
"size": [425.28, 180.61],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 33
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [28],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [-223, -93],
"size": [422.85, 164.31],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 36
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo) girl (flat chest:0.9), (fennec ears:1.0)\u00a0 (fox ears:1.0), (messy hair), eyes, standing (school uniform sweater) sky clouds nature national park beautiful winter snow (scenery HDR landscape)\n(sunset)\n"
]
},
{
"id": 12,
"type": "LoadImage",
"pos": [-280, 287],
"size": [365, 314],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [12],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["pose_worship.png", "image"]
},
{
"id": 13,
"type": "VAELoader",
"pos": [1098, 599],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [14, 20],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["kl-f8-anime2.ckpt"]
},
{
"id": 15,
"type": "CLIPTextEncode",
"pos": [1321, -395],
"size": [400, 200],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 32
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [21],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"girl (flat chest:0.9), (fennec ears:0.8)\u00a0 (fox ears:0.8), (messy hair), (highlights), (realistic starry eyes pupil:1.1), standing (school uniform sweater)\nsky clouds nature national park beautiful winter snow scenery HDR landscape\n\n(sunset)\n\n"
]
},
{
"id": 36,
"type": "CheckpointLoaderSimple",
"pos": [570, -206],
"size": [315, 98],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [29],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [34],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["AOM3A3.safetensors"]
},
{
"id": 37,
"type": "CLIPSetLastLayer",
"pos": [933, -183],
"size": [315, 58],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 34,
"slot_index": 0
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [32, 33],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 38,
"type": "CLIPSetLastLayer",
"pos": [-733, 375],
"size": [315, 58],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 37
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [35, 36],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 39,
"type": "CheckpointLoaderSimple",
"pos": [-1100, 302],
"size": [315, 98],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [38],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [37],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 14,
"type": "LatentUpscale",
"pos": [1486, 494],
"size": [315, 130],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 16
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [17],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1920, 1088, "disabled"]
},
{
"id": 18,
"type": "SaveImage",
"pos": [2769, 370],
"size": [357.86, 262.24],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 19
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 16,
"type": "KSampler",
"pos": [2011, 248],
"size": [315, 262],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 29
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 21
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 28
},
{
"name": "latent_image",
"type": "LATENT",
"link": 17
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [18],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
284006177305237,
"randomize",
8,
5,
"dpmpp_sde",
"simple",
0.52
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [353.07, 252.57],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 11,
"type": "ControlNetLoader",
"pos": [-250, 151],
"size": [450.9, 58],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [11],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["control_v11p_sd15_openpose_fp16.safetensors"]
},
{
"id": 40,
"type": "MarkdownNote",
"pos": [-1095, 480],
"size": [225, 60],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#2-pass-pose-worship)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[10, 6, 0, 10, 0, "CONDITIONING"],
[11, 11, 0, 10, 1, "CONTROL_NET"],
[12, 12, 0, 10, 2, "IMAGE"],
[13, 10, 0, 3, 1, "CONDITIONING"],
[14, 13, 0, 8, 1, "VAE"],
[16, 3, 0, 14, 0, "LATENT"],
[17, 14, 0, 16, 3, "LATENT"],
[18, 16, 0, 17, 0, "LATENT"],
[19, 17, 0, 18, 0, "IMAGE"],
[20, 13, 0, 17, 1, "VAE"],
[21, 15, 0, 16, 1, "CONDITIONING"],
[28, 35, 0, 16, 2, "CONDITIONING"],
[29, 36, 0, 16, 0, "MODEL"],
[32, 37, 0, 15, 0, "CLIP"],
[33, 37, 0, 35, 0, "CLIP"],
[34, 36, 1, 37, 0, "CLIP"],
[35, 38, 0, 7, 0, "CLIP"],
[36, 38, 0, 6, 0, "CLIP"],
[37, 39, 1, 38, 0, "CLIP"],
[38, 39, 0, 3, 0, "MODEL"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.88,
"offset": [1252.62, 517.93]
}
},
"version": 0.4,
"models": [
{
"name": "control_v11p_sd15_openpose_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_openpose_fp16.safetensors",
"directory": "controlnet"
},
{
"name": "Anything-V3.0.ckpt",
"url": "https://huggingface.co/xiaolxl/Stable-diffusion-models/resolve/main/Anything-V3.0.ckpt?download=true",
"directory": "checkpoints"
},
{
"name": "AOM3A3.safetensors",
"url": "https://huggingface.co/WarriorMama777/OrangeMixs/resolve/eb7490173381625e0403dd52b8051cb969093dc1/Models/AbyssOrangeMix3/AOM3A3.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "kl-f8-anime2.ckpt",
"url": "https://huggingface.co/hakurei/waifu-diffusion-v1-4/resolve/main/vae/kl-f8-anime2.ckpt?download=true",
"directory": "vae"
}
]
}

View File

@@ -0,0 +1,976 @@
{
"last_node_id": 48,
"last_link_id": 113,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1320, 302],
"size": [210, 46],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 36
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1575, 350],
"size": [210, 250],
"flags": {},
"order": 26,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 49
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [175, 496],
"size": [425.28, 180.61],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 100
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 12,
"type": "ConditioningCombine",
"pos": [834, -246],
"size": [342.6, 46],
"flags": {
"collapsed": false
},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 63
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 57
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [58],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 20,
"type": "VAELoader",
"pos": [1041, 544],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [36, 51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["vae-ft-mse-840000-ema-pruned.safetensors"]
},
{
"id": 35,
"type": "ConditioningCombine",
"pos": [873, -705],
"size": [342.6, 46],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 61
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [63],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [695, 531],
"size": [315, 106],
"flags": {
"collapsed": false
},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [704, 1280, 1]
},
{
"id": 22,
"type": "LatentUpscale",
"pos": [1412, 79],
"size": [315, 130],
"flags": {},
"order": 25,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 41
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1088, 1920, "disabled"]
},
{
"id": 14,
"type": "CLIPTextEncode",
"pos": [-4, -994],
"size": [400, 200],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 104
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [95],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) (night:1.3) (darkness) sky (black) (stars:1.2) (galaxy:1.2) (space) (universe)"
]
},
{
"id": 18,
"type": "ConditioningSetArea",
"pos": [482, -709],
"size": [312, 154],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 96
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [62],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 320, 1]
},
{
"id": 31,
"type": "VAEDecode",
"pos": [2783.3, -41],
"size": [210, 46],
"flags": {},
"order": 28,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 50
},
{
"name": "vae",
"type": "VAE",
"link": 51
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [87],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 32,
"type": "SaveImage",
"pos": [3012.3, -42],
"size": [315, 250],
"flags": {},
"order": 29,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 87
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 24,
"type": "KSampler",
"pos": [2421.3, -389],
"size": [315, 262],
"flags": {},
"order": 27,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 108
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 46
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 47
},
{
"name": "latent_image",
"type": "LATENT",
"link": 42
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [50],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1122440447966177,
"randomize",
14,
7,
"dpmpp_2m",
"simple",
0.5
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [152, 265],
"size": [422.85, 164.31],
"flags": {
"collapsed": false
},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 99
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [93],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(masterpiece) (best quality) beautiful landscape breathtaking amazing view nature photograph forest mountains ocean (sky) national park scenery"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [885, 136],
"size": [315, 262],
"flags": {},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 107
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 54
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 41],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
335608130539327,
"randomize",
13,
8.5,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 11,
"type": "ConditioningSetArea",
"pos": [479, -454],
"size": [314, 154],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 97,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 512, 1]
},
{
"id": 19,
"type": "ConditioningCombine",
"pos": [1180, -151],
"size": [342.6, 46],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 58
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 94
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [54],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 10,
"type": "ConditioningCombine",
"pos": [803, -149],
"size": [342.6, 46],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 40
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 93
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [94],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 15,
"type": "ConditioningSetArea",
"pos": [466, -233],
"size": [299, 154],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 98
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 704, 1]
},
{
"id": 13,
"type": "CLIPTextEncode",
"pos": [-5, -729],
"size": [400, 200],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 103
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [96],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) (evening:1.2) (sky:1.2) (clouds) (colorful) (HDR:1.2) (sunset:1.3)\n"
]
},
{
"id": 17,
"type": "CLIPTextEncode",
"pos": [11, -455],
"size": [400, 200],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 102
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [97],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["(best quality) (daytime:1.2) sky (blue)\n"]
},
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [16, -217],
"size": [400, 200],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 101
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [98],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["(masterpiece) (best quality) morning sky\n\n"]
},
{
"id": 34,
"type": "ConditioningSetArea",
"pos": [476, -932],
"size": [312, 154],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 95,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [61],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 0, 1.2]
},
{
"id": 44,
"type": "CLIPSetLastLayer",
"pos": [-363, 453],
"size": [315, 58],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 106
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [99, 100, 101, 102, 103, 104],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 45,
"type": "CheckpointLoaderSimple",
"pos": [-849, 429],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [107],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [106],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 27,
"type": "CLIPTextEncode",
"pos": [1969.3, -336],
"size": [400, 200],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 113
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [47],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 26,
"type": "CLIPTextEncode",
"pos": [1965, -580],
"size": [400, 200],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 112
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [46],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) beautiful (HDR:1.2) (realistic:1.2) landscape breathtaking amazing view nature scenery photograph forest mountains ocean daytime night evening morning, (sky:1.2)\n"
]
},
{
"id": 47,
"type": "CLIPSetLastLayer",
"pos": [1569, -403],
"size": [315, 58],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 111
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [112, 113],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 46,
"type": "CheckpointLoaderSimple",
"pos": [1217, -496],
"size": [315, 98],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [108],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [111],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["AbyssOrangeMix2_hard.safetensors"]
},
{
"id": 48,
"type": "MarkdownNote",
"pos": [-840, 585],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/area_composition/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[36, 20, 0, 8, 1, "VAE"],
[40, 15, 0, 10, 0, "CONDITIONING"],
[41, 3, 0, 22, 0, "LATENT"],
[42, 22, 0, 24, 3, "LATENT"],
[46, 26, 0, 24, 1, "CONDITIONING"],
[47, 27, 0, 24, 2, "CONDITIONING"],
[49, 8, 0, 9, 0, "IMAGE"],
[50, 24, 0, 31, 0, "LATENT"],
[51, 20, 0, 31, 1, "VAE"],
[54, 19, 0, 3, 1, "CONDITIONING"],
[57, 11, 0, 12, 1, "CONDITIONING"],
[58, 12, 0, 19, 0, "CONDITIONING"],
[61, 34, 0, 35, 0, "CONDITIONING"],
[62, 18, 0, 35, 1, "CONDITIONING"],
[63, 35, 0, 12, 0, "CONDITIONING"],
[87, 31, 0, 32, 0, "IMAGE"],
[93, 6, 0, 10, 1, "CONDITIONING"],
[94, 10, 0, 19, 1, "CONDITIONING"],
[95, 14, 0, 34, 0, "CONDITIONING"],
[96, 13, 0, 18, 0, "CONDITIONING"],
[97, 17, 0, 11, 0, "CONDITIONING"],
[98, 33, 0, 15, 0, "CONDITIONING"],
[99, 44, 0, 6, 0, "CLIP"],
[100, 44, 0, 7, 0, "CLIP"],
[101, 44, 0, 33, 0, "CLIP"],
[102, 44, 0, 17, 0, "CLIP"],
[103, 44, 0, 13, 0, "CLIP"],
[104, 44, 0, 14, 0, "CLIP"],
[106, 45, 1, 44, 0, "CLIP"],
[107, 45, 0, 3, 0, "MODEL"],
[108, 46, 0, 24, 0, "MODEL"],
[111, 46, 1, 47, 0, "CLIP"],
[112, 47, 0, 26, 0, "CLIP"],
[113, 47, 0, 27, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.44,
"offset": [1558.38, 1652.18]
}
},
"version": 0.4,
"models": [
{
"name": "Anything-V3.0.ckpt",
"url": "https://huggingface.co/xiaolxl/Stable-diffusion-models/resolve/main/Anything-V3.0.ckpt?download=true",
"directory": "checkpoints"
},
{
"name": "AbyssOrangeMix2_hard.safetensors",
"url": "https://huggingface.co/WarriorMama777/OrangeMixs/resolve/main/Models/AbyssOrangeMix2/AbyssOrangeMix2_hard.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "vae-ft-mse-840000-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors?download=true",
"directory": "vae"
}
]
}

View File

@@ -0,0 +1,977 @@
{
"last_node_id": 48,
"last_link_id": 114,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1320, 302],
"size": [210, 46],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 36
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1575, 350],
"size": [210, 58],
"flags": {},
"order": 26,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 49
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [175, 496],
"size": [425.28, 180.61],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 106
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 12,
"type": "ConditioningCombine",
"pos": [834, -246],
"size": [342.6, 46],
"flags": {
"collapsed": false
},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 63
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 57
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [58],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 20,
"type": "VAELoader",
"pos": [1041, 544],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [36, 51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["vae-ft-mse-840000-ema-pruned.safetensors"]
},
{
"id": 35,
"type": "ConditioningCombine",
"pos": [873, -705],
"size": [342.6, 46],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 61
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [63],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [695, 531],
"size": [315, 106],
"flags": {
"collapsed": false
},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [704, 1280, 1]
},
{
"id": 22,
"type": "LatentUpscale",
"pos": [1412, 79],
"size": [315, 130],
"flags": {},
"order": 25,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 41
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1088, 1920, "disabled"]
},
{
"id": 14,
"type": "CLIPTextEncode",
"pos": [-4, -994],
"size": [400, 200],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 110
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [89],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) (night:1.3) (darkness) sky (black) (stars:1.2) (galaxy:1.2) (space) (universe)"
]
},
{
"id": 13,
"type": "CLIPTextEncode",
"pos": [-5, -729],
"size": [400, 200],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 109
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [91],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) (evening:1.2) (sky:1.2) (clouds) (colorful) (HDR:1.2) (sunset:1.3)\n"
]
},
{
"id": 17,
"type": "CLIPTextEncode",
"pos": [11, -455],
"size": [400, 200],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 108
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [90],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["(best quality) (daytime:1.2) sky (blue)\n"]
},
{
"id": 18,
"type": "ConditioningSetArea",
"pos": [482, -709],
"size": [312, 154],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 90
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [62],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 320, 1]
},
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [16, -217],
"size": [400, 200],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 107
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [92],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["(masterpiece) (best quality) morning sky\n\n"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [152, 265],
"size": [422.85, 164.31],
"flags": {
"collapsed": false
},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 105
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [93],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(masterpiece) (best quality) beautiful landscape breathtaking amazing view nature photograph forest mountains ocean (sky) national park scenery"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [885, 136],
"size": [315, 262],
"flags": {},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 104
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 54
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 41],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
823155751257884,
"randomize",
13,
8.5,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 11,
"type": "ConditioningSetArea",
"pos": [479, -454],
"size": [314, 154],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 91,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 512, 1]
},
{
"id": 19,
"type": "ConditioningCombine",
"pos": [1180, -151],
"size": [342.6, 46],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 58
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 94
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [54],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 10,
"type": "ConditioningCombine",
"pos": [803, -149],
"size": [342.6, 46],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 40
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 93
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [94],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 34,
"type": "ConditioningSetArea",
"pos": [476, -932],
"size": [312, 154],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 92,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [61],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 0, 1]
},
{
"id": 15,
"type": "ConditioningSetArea",
"pos": [466, -233],
"size": [299, 154],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 89
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [704, 384, 0, 704, 1.5]
},
{
"id": 44,
"type": "CheckpointLoaderSimple",
"pos": [-703, 444],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [104],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [111],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 46,
"type": "CLIPSetLastLayer",
"pos": [-354, 244],
"size": [315, 58],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 111,
"slot_index": 0
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [105, 106, 107, 108, 109, 110],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 24,
"type": "KSampler",
"pos": [2220, -398],
"size": [315, 262],
"flags": {},
"order": 27,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 95
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 46
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 47
},
{
"name": "latent_image",
"type": "LATENT",
"link": 42
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [50],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
418330692116968,
"randomize",
14,
7,
"dpmpp_2m",
"simple",
0.5
]
},
{
"id": 32,
"type": "SaveImage",
"pos": [2825, -62],
"size": [315, 58],
"flags": {},
"order": 29,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 87
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 31,
"type": "VAEDecode",
"pos": [2590, -61],
"size": [210, 46],
"flags": {},
"order": 28,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 50
},
{
"name": "vae",
"type": "VAE",
"link": 51
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [87],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 26,
"type": "CLIPTextEncode",
"pos": [1781, -571],
"size": [400, 200],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 113
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [46],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(best quality) beautiful (HDR:1.2) (realistic:1.2) landscape breathtaking amazing view nature scenery photograph forest mountains ocean daytime night evening morning, (sky:1.2)\n"
]
},
{
"id": 27,
"type": "CLIPTextEncode",
"pos": [1787, -317],
"size": [400, 200],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 114
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [47],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 47,
"type": "CLIPSetLastLayer",
"pos": [1407, -402],
"size": [315, 58],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 112
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [113, 114],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 45,
"type": "CheckpointLoaderSimple",
"pos": [1074, -444],
"size": [315, 98],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [95],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [112],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["AbyssOrangeMix2_hard.safetensors"]
},
{
"id": 48,
"type": "MarkdownNote",
"pos": [-690, 615],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/area_composition/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[36, 20, 0, 8, 1, "VAE"],
[40, 15, 0, 10, 0, "CONDITIONING"],
[41, 3, 0, 22, 0, "LATENT"],
[42, 22, 0, 24, 3, "LATENT"],
[46, 26, 0, 24, 1, "CONDITIONING"],
[47, 27, 0, 24, 2, "CONDITIONING"],
[49, 8, 0, 9, 0, "IMAGE"],
[50, 24, 0, 31, 0, "LATENT"],
[51, 20, 0, 31, 1, "VAE"],
[54, 19, 0, 3, 1, "CONDITIONING"],
[57, 11, 0, 12, 1, "CONDITIONING"],
[58, 12, 0, 19, 0, "CONDITIONING"],
[61, 34, 0, 35, 0, "CONDITIONING"],
[62, 18, 0, 35, 1, "CONDITIONING"],
[63, 35, 0, 12, 0, "CONDITIONING"],
[87, 31, 0, 32, 0, "IMAGE"],
[89, 14, 0, 15, 0, "CONDITIONING"],
[90, 17, 0, 18, 0, "CONDITIONING"],
[91, 13, 0, 11, 0, "CONDITIONING"],
[92, 33, 0, 34, 0, "CONDITIONING"],
[93, 6, 0, 10, 1, "CONDITIONING"],
[94, 10, 0, 19, 1, "CONDITIONING"],
[95, 45, 0, 24, 0, "MODEL"],
[104, 44, 0, 3, 0, "MODEL"],
[105, 46, 0, 6, 0, "CLIP"],
[106, 46, 0, 7, 0, "CLIP"],
[107, 46, 0, 33, 0, "CLIP"],
[108, 46, 0, 17, 0, "CLIP"],
[109, 46, 0, 13, 0, "CLIP"],
[110, 46, 0, 14, 0, "CLIP"],
[111, 44, 1, 46, 0, "CLIP"],
[112, 45, 1, 47, 0, "CLIP"],
[113, 47, 0, 26, 0, "CLIP"],
[114, 47, 0, 27, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.79,
"offset": [1022.96, -230.7]
}
},
"version": 0.4,
"models": [
{
"name": "Anything-V3.0.ckpt",
"url": "https://huggingface.co/xiaolxl/Stable-diffusion-models/resolve/main/Anything-V3.0.ckpt?download=true",
"directory": "checkpoints"
},
{
"name": "AbyssOrangeMix2_hard.safetensors",
"url": "https://huggingface.co/WarriorMama777/OrangeMixs/resolve/main/Models/AbyssOrangeMix2/AbyssOrangeMix2_hard.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "vae-ft-mse-840000-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors?download=true",
"directory": "vae"
}
]
}

View File

@@ -0,0 +1,625 @@
{
"last_node_id": 50,
"last_link_id": 108,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1320, 302],
"size": [210, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 36
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [175, 496],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 101
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 27,
"type": "CLIPTextEncode",
"pos": [1570, -336],
"size": [400, 200],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 103
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [47],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis) (pumpkin)"
]
},
{
"id": 22,
"type": "LatentUpscale",
"pos": [1412, 79],
"size": [315, 130],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 41
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1920, 1088, "disabled"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [695, 531],
"size": [315, 106],
"flags": {
"collapsed": false
},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1280, 704, 1]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1556, 303],
"size": [210, 250],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 49
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [156, 269],
"size": [422.85, 164.31],
"flags": {
"collapsed": false
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 102
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [98],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo:1.3) (best quality) (HDR:1.0) colourful, nature wilderness snow mountain peak, (winter:1.2), on landscape mountain in Switzerland alps sunset, aerial view (cityscape:1.3) skyscrapers modern city satellite view, (sunset)\ngirl with fennec ears fox ears, sweater, sitting\n"
]
},
{
"id": 47,
"type": "ConditioningCombine",
"pos": [530, 71],
"size": [342.6, 46],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "conditioning_1",
"type": "CONDITIONING",
"link": 97
},
{
"name": "conditioning_2",
"type": "CONDITIONING",
"link": 98
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [99],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningCombine"
},
"widgets_values": []
},
{
"id": 45,
"type": "CLIPTextEncode",
"pos": [-88, -224],
"size": [400, 200],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 105,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [93],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo:1.3) (best quality) (HDR:1.0) girl colourful of (flat chest:0.9), (fennec ears:1.0)\u00a0 (fox ears:1.0), blonde twintails medium (messy hair:1.2), (eyes:1.0), sweater, (pink:0.8) , long sleeves, sweatpants (pants), gloves, nature wilderness (sitting:1.3) on snow mountain peak, (:d:0.5) (blush:0.9), (winter:1.2), on landscape mountain in Switzerland alps sunset, comfortable, (spread legs:1.1), aerial view (cityscape:1.3) skyscrapers modern city satellite view, (sunset)"
]
},
{
"id": 31,
"type": "VAEDecode",
"pos": [2419, 10],
"size": [210, 46],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 50
},
{
"name": "vae",
"type": "VAE",
"link": 51
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 46,
"type": "ConditioningSetArea",
"pos": [344, -227],
"size": [317.4, 154],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 93
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [97],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningSetArea"
},
"widgets_values": [640, 640, 0, 64, 1]
},
{
"id": 26,
"type": "CLIPTextEncode",
"pos": [1573, -583],
"size": [400, 200],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 104
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [46],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece solo (realistic) (best quality) (HDR:1.0) girl colourful of (flat chest:0.9), (fox ears:0.9), blonde twintails messy hair, (eyes:1.0), sweater, (pink:0.8) , long sleeves, sweatpants pants, gloves, nature wilderness sitting on snow mountain peak aerial view, (:d:0.5) (blush:0.9), (winter:0.9), mountain in Switzerland, comfortable, aerial view (cityscape:1.2) skyscrapers modern city satellite view, (sunset)\n"
]
},
{
"id": 3,
"type": "KSampler",
"pos": [885, 136],
"size": [315, 262],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 106
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 99
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 41],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
830459492315490,
"randomize",
13,
7,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 32,
"type": "SaveImage",
"pos": [2648, -11],
"size": [210, 250],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 100
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 24,
"type": "KSampler",
"pos": [2047, -270],
"size": [315, 262],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 107
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 46
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 47
},
{
"name": "latent_image",
"type": "LATENT",
"link": 42
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [50],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
626842672818096,
"randomize",
7,
5,
"dpmpp_sde",
"simple",
0.52
]
},
{
"id": 20,
"type": "VAELoader",
"pos": [1086, 563],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [36, 51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["vae-ft-mse-840000-ema-pruned.safetensors"]
},
{
"id": 49,
"type": "CLIPSetLastLayer",
"pos": [-227, 630],
"size": [315, 58],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 108
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [101, 102, 103, 104, 105],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 48,
"type": "CheckpointLoaderSimple",
"pos": [-621, 603],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [106, 107],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [108],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 50,
"type": "MarkdownNote",
"pos": [-615, 765],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/area_composition/#increasing-consistency-of-images-with-area-composition)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[36, 20, 0, 8, 1, "VAE"],
[41, 3, 0, 22, 0, "LATENT"],
[42, 22, 0, 24, 3, "LATENT"],
[46, 26, 0, 24, 1, "CONDITIONING"],
[47, 27, 0, 24, 2, "CONDITIONING"],
[49, 8, 0, 9, 0, "IMAGE"],
[50, 24, 0, 31, 0, "LATENT"],
[51, 20, 0, 31, 1, "VAE"],
[93, 45, 0, 46, 0, "CONDITIONING"],
[97, 46, 0, 47, 0, "CONDITIONING"],
[98, 6, 0, 47, 1, "CONDITIONING"],
[99, 47, 0, 3, 1, "CONDITIONING"],
[100, 31, 0, 32, 0, "IMAGE"],
[101, 49, 0, 7, 0, "CLIP"],
[102, 49, 0, 6, 0, "CLIP"],
[103, 49, 0, 27, 0, "CLIP"],
[104, 49, 0, 26, 0, "CLIP"],
[105, 49, 0, 45, 0, "CLIP"],
[106, 48, 0, 3, 0, "MODEL"],
[107, 48, 0, 24, 0, "MODEL"],
[108, 48, 1, 49, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.53,
"offset": [1214.17, 1188.8]
}
},
"version": 0.4,
"models": [
{
"name": "Anything-V3.0.ckpt",
"url": "https://huggingface.co/xiaolxl/Stable-diffusion-models/resolve/main/Anything-V3.0.ckpt?download=true",
"directory": "checkpoints"
},
{
"name": "vae-ft-mse-840000-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors?download=true",
"directory": "vae"
}
]
}

View File

@@ -0,0 +1,390 @@
{
"last_node_id": 15,
"last_link_id": 21,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1210, 250],
"size": [210, 46],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 14
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [-42, -147],
"size": [422.85, 164.31],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 21
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo) girl (flat chest:0.9), (fennec ears:1.1)\u00a0 (fox ears:1.1), (blonde hair:1.0), messy hair, sky clouds, standing in a grass field, (chibi), blue eyes"
]
},
{
"id": 12,
"type": "ControlNetLoader",
"pos": [-50, 69],
"size": [422, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["control_v11p_sd15_scribble_fp16.safetensors"]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [355, 213],
"size": [425.28, 180.61],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [16],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis)"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [439, 446],
"size": [315, 106],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 13,
"type": "VAELoader",
"pos": [833, 484],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [14],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["vae-ft-mse-840000-ema-pruned.safetensors"]
},
{
"id": 10,
"type": "ControlNetApply",
"pos": [459, 51],
"size": [317.4, 98],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 10
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 13
},
{
"name": "image",
"type": "IMAGE",
"link": 12
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [18],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [0.9]
},
{
"id": 11,
"type": "LoadImage",
"pos": [-70, 177],
"size": [387.97, 465.51],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [12],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["input_scribble_example.png", "image"]
},
{
"id": 14,
"type": "CheckpointLoaderSimple",
"pos": [-448, 231],
"size": [315, 98],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [19],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [20, 21],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["Anything-V3.0.ckpt"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1453, 247],
"size": [393.62, 449.16],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [842, 150],
"size": [315, 262],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 19
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 18
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 16
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1002496614778823,
"randomize",
16,
6,
"uni_pc",
"normal",
1
]
},
{
"id": 15,
"type": "MarkdownNote",
"pos": [-450, 375],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[10, 6, 0, 10, 0, "CONDITIONING"],
[12, 11, 0, 10, 2, "IMAGE"],
[13, 12, 0, 10, 1, "CONTROL_NET"],
[14, 13, 0, 8, 1, "VAE"],
[16, 7, 0, 3, 2, "CONDITIONING"],
[18, 10, 0, 3, 1, "CONDITIONING"],
[19, 14, 0, 3, 0, "MODEL"],
[20, 14, 1, 7, 0, "CLIP"],
[21, 14, 1, 6, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.8,
"offset": [843.77, 555.93]
}
},
"version": 0.4,
"models": [
{
"name": "Anything-V3.0.ckpt",
"url": "https://huggingface.co/xiaolxl/Stable-diffusion-models/resolve/main/Anything-V3.0.ckpt?download=true",
"directory": "checkpoints"
},
{
"name": "control_v11p_sd15_scribble_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_scribble_fp16.safetensors?download=true",
"directory": "controlnet"
},
{
"name": "vae-ft-mse-840000-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors?download=true",
"directory": "vae"
}
]
}

View File

@@ -0,0 +1,378 @@
{
"last_node_id": 35,
"last_link_id": 52,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1053, 172],
"size": [210, 46],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 51
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 23,
"type": "ControlNetApply",
"pos": [593.6, -388.0],
"size": [317.4, 98],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 42
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 47
},
{
"name": "image",
"type": "IMAGE",
"link": 34
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [1.0]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [259, 463],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [832, 384, 1]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [169, 212],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 49
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2)"
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1310, 169],
"size": [516.05, 301.24],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 33,
"type": "DiffControlNetLoader",
"pos": [131, -338],
"size": [421.93, 58],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 48
}
],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [47],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DiffControlNetLoader"
},
"widgets_values": ["diff_control_sd15_depth_fp16.safetensors"]
},
{
"id": 24,
"type": "CLIPTextEncode",
"pos": [-305, -435],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 50
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["underwater photograph shark\n\n\n\n"]
},
{
"id": 3,
"type": "KSampler",
"pos": [699, 167],
"size": [315, 262],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 52
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
891858402356003,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 20,
"type": "LoadImage",
"pos": [135, -234],
"size": [429.73, 314],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [34],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["shark_depthmap.png", "image"]
},
{
"id": 34,
"type": "CheckpointLoaderSimple",
"pos": [-281, 110],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [48, 52],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [49, 50],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [51],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 35,
"type": "MarkdownNote",
"pos": [-270, 255],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#pose-controlnet)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[34, 20, 0, 23, 2, "IMAGE"],
[40, 23, 0, 3, 1, "CONDITIONING"],
[42, 24, 0, 23, 0, "CONDITIONING"],
[47, 33, 0, 23, 1, "CONTROL_NET"],
[48, 34, 0, 33, 0, "MODEL"],
[49, 34, 1, 7, 0, "CLIP"],
[50, 34, 1, 24, 0, "CLIP"],
[51, 34, 2, 8, 1, "VAE"],
[52, 34, 0, 3, 0, "MODEL"]
],
"groups": [
{
"id": 1,
"title": "Apply Depth ControlNet",
"bounding": [210, -480, 739, 336],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.86,
"offset": [671.97, 711.84]
}
},
"version": 0.4,
"models": [
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "diff_control_sd15_depth_fp16.safetensors",
"url": "https://huggingface.co/kohya-ss/ControlNet-diff-modules/resolve/main/diff_control_sd15_depth_fp16.safetensors?download=true",
"directory": "controlnet"
}
]
}

View File

@@ -0,0 +1,371 @@
{
"last_node_id": 34,
"last_link_id": 49,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1053, 172],
"size": [210, 46],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 49
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [259, 463],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [832, 384, 1]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [169, 212],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 47
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2)"
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1310, 169],
"size": [516.05, 301.24],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [699, 167],
"size": [315, 262],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 46
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
891858402356003,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 23,
"type": "ControlNetApply",
"pos": [553, -289],
"size": [317.4, 98],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 42
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 44
},
{
"name": "image",
"type": "IMAGE",
"link": 34
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [1.0]
},
{
"id": 31,
"type": "ControlNetLoader",
"pos": [168, -286],
"size": [345, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [44],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["t2iadapter_depth_sd14v1.pth"]
},
{
"id": 20,
"type": "LoadImage",
"pos": [88, -174],
"size": [413, 314],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [34],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["shark_depthmap.png", "image"]
},
{
"id": 33,
"type": "CheckpointLoaderSimple",
"pos": [-349, 161],
"size": [315, 98],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [46],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [47, 48],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [49],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 24,
"type": "CLIPTextEncode",
"pos": [-360, -261],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 48
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["underwater photograph shark\n\n\n\n"]
},
{
"id": 34,
"type": "MarkdownNote",
"pos": [-345, 300],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#t2i-adapter-vs-controlnets)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[34, 20, 0, 23, 2, "IMAGE"],
[40, 23, 0, 3, 1, "CONDITIONING"],
[42, 24, 0, 23, 0, "CONDITIONING"],
[44, 31, 0, 23, 1, "CONTROL_NET"],
[46, 33, 0, 3, 0, "MODEL"],
[47, 33, 1, 7, 0, "CLIP"],
[48, 33, 1, 24, 0, "CLIP"],
[49, 33, 2, 8, 1, "VAE"]
],
"groups": [
{
"id": 1,
"title": "Apply Depth T2I-Adapter",
"bounding": [150, -375, 739, 336],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.84,
"offset": [737.68, 680.26]
}
},
"version": 0.4,
"models": [
{
"name": "t2iadapter_depth_sd14v1.pth",
"url": "https://huggingface.co/TencentARC/T2I-Adapter/resolve/main/models/t2iadapter_depth_sd14v1.pth?download=true",
"directory": "controlnet"
},
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,267 @@
{
"last_node_id": 10,
"last_link_id": 9,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"photograph in the style of embedding:SDA768.pt girl with blonde hair\nlandscape scenery view"
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [26, 474],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [1],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v2-1_768-ema-pruned.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [469, 528],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 1
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
193694018275622,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 10,
"type": "MarkdownNote",
"pos": [30, 630],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/textual_inversion_embeddings/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[1, 4, 0, 3, 0, "MODEL"],
[2, 5, 0, 3, 3, "LATENT"],
[3, 4, 1, 6, 0, "CLIP"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[5, 4, 1, 7, 0, "CLIP"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[9, 8, 0, 9, 0, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.84,
"offset": [498.31, 149.5]
}
},
"version": 0.4,
"models": [
{
"name": "v2-1_768-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,329 @@
{
"last_node_id": 15,
"last_link_id": 19,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 18
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 17
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece best quality girl standing in victorian clothing"
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 19
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [15],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1791, 169],
"size": [455.99, 553.09],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 16
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 14,
"type": "ImageUpscaleWithModel",
"pos": [1506, 151],
"size": [241.8, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "upscale_model",
"type": "UPSCALE_MODEL",
"link": 14
},
{
"name": "image",
"type": "IMAGE",
"link": 15
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [16],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageUpscaleWithModel"
},
"widgets_values": []
},
{
"id": 13,
"type": "UpscaleModelLoader",
"pos": [1128, 51],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "UPSCALE_MODEL",
"type": "UPSCALE_MODEL",
"links": [14],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UpscaleModelLoader"
},
"widgets_values": ["RealESRGAN_x4plus.pth"]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 1
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
833543590226030,
"randomize",
20,
8,
"euler",
"normal",
1
]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-11, 307],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [1],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [17, 18],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [19],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 15,
"type": "MarkdownNote",
"pos": [0, 465],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/upscale_models/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[1, 4, 0, 3, 0, "MODEL"],
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[14, 13, 0, 14, 0, "UPSCALE_MODEL"],
[15, 8, 0, 14, 1, "IMAGE"],
[16, 14, 0, 9, 0, "IMAGE"],
[17, 4, 1, 6, 0, "CLIP"],
[18, 4, 1, 7, 0, "CLIP"],
[19, 4, 2, 8, 1, "VAE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.82,
"offset": [400.67, 431.06]
}
},
"version": 0.4,
"models": [
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "RealESRGAN_x4plus.pth",
"url": "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.0/RealESRGAN_x4plus.pth",
"directory": "upscale_models"
}
]
}

View File

@@ -0,0 +1,484 @@
{
"last_node_id": 36,
"last_link_id": 70,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [307, 282],
"size": [425.28, 180.61],
"flags": {
"collapsed": true
},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 63
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [68],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 17,
"type": "LoadImage",
"pos": [220, 530],
"size": [315, 314.0],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [49],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sd3_controlnet_example.png", "image"]
},
{
"id": 19,
"type": "PreviewImage",
"pos": [899, 532],
"size": [571.59, 625.53],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 26
}
],
"outputs": [],
"properties": {
"Node name for S&R": "PreviewImage"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [1290, 40],
"size": [315, 262],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 57
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 64
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 65
},
{
"name": "latent_image",
"type": "LATENT",
"link": 66
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
50363905047731,
"randomize",
20,
1,
"euler",
"normal",
1
]
},
{
"id": 35,
"type": "InstructPixToPixConditioning",
"pos": [1040, 50],
"size": [235.2, 86],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 67
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 68
},
{
"name": "vae",
"type": "VAE",
"link": 69
},
{
"name": "pixels",
"type": "IMAGE",
"link": 70
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [64],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [65],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [66],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "InstructPixToPixConditioning"
},
"widgets_values": []
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1620, 40],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 60
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1850, 40],
"size": [828.95, 893.85],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 32,
"type": "VAELoader",
"pos": [1290, 350],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [60, 69],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [700, 50],
"size": [317.4, 58],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [67],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [30]
},
{
"id": 23,
"type": "CLIPTextEncode",
"pos": [260, 50],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"cute anime girl with massive fluffy fennec ears and a big fluffy tail blonde messy long hair blue eyes wearing a pink sweater and jeans"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 34,
"type": "DualCLIPLoader",
"pos": [-80, 110],
"size": [315, 106],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [62, 63]
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp16.safetensors",
"flux",
"default"
]
},
{
"id": 31,
"type": "UNETLoader",
"pos": [710, -80],
"size": [315, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-canny-dev.safetensors", "default"]
},
{
"id": 18,
"type": "Canny",
"pos": [560, 530],
"size": [315, 82],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 49
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [26, 70],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "Canny"
},
"widgets_values": [0.15, 0.3]
},
{
"id": 36,
"type": "MarkdownNote",
"pos": [-75, 270],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#canny-and-depth)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[26, 18, 0, 19, 0, "IMAGE"],
[41, 23, 0, 26, 0, "CONDITIONING"],
[49, 17, 0, 18, 0, "IMAGE"],
[57, 31, 0, 3, 0, "MODEL"],
[60, 32, 0, 8, 1, "VAE"],
[62, 34, 0, 23, 0, "CLIP"],
[63, 34, 0, 7, 0, "CLIP"],
[64, 35, 0, 3, 1, "CONDITIONING"],
[65, 35, 1, 3, 2, "CONDITIONING"],
[66, 35, 2, 3, 3, "LATENT"],
[67, 26, 0, 35, 0, "CONDITIONING"],
[68, 7, 0, 35, 1, "CONDITIONING"],
[69, 32, 0, 35, 2, "VAE"],
[70, 18, 0, 35, 3, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.67,
"offset": [553.16, 455.34]
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "clip"
},
{
"name": "flux1-canny-dev-lora.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Canny-dev-lora/resolve/main/flux1-canny-dev-lora.safetensors?download=true",
"directory": "loras"
},
{
"name": "flux1-dev-fp8.safetensors",
"url": "https://huggingface.co/Comfy-Org/flux1-dev/resolve/main/flux1-dev-fp8.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
},
{
"name": "flux1-canny-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Canny-dev/resolve/main/flux1-canny-dev.safetensors?download=true",
"directory": "diffusion_models"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "clip"
}
]
}

View File

@@ -0,0 +1,459 @@
{
"last_node_id": 40,
"last_link_id": 76,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1620, 98],
"size": [210, 46],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 60
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [307, 282],
"size": [425.28, 180.61],
"flags": {
"collapsed": true
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 63
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [68],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 34,
"type": "DualCLIPLoader",
"pos": [-238, 112],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [62, 63]
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp16.safetensors",
"flux",
"default"
]
},
{
"id": 17,
"type": "LoadImage",
"pos": [307, 342],
"size": [315, 314.0],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [71],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["shark_depthmap.png", "image"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [621, 8],
"size": [317.4, 58],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [67],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [10]
},
{
"id": 35,
"type": "InstructPixToPixConditioning",
"pos": [1018, 124],
"size": [235.2, 86],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 67
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 68
},
{
"name": "vae",
"type": "VAE",
"link": 69
},
{
"name": "pixels",
"type": "IMAGE",
"link": 71
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [64],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [65],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [73],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "InstructPixToPixConditioning"
},
"widgets_values": []
},
{
"id": 32,
"type": "VAELoader",
"pos": [656, 165],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [60, 69],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1865, 98],
"size": [722.41, 425.77],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 37,
"type": "LoraLoaderModelOnly",
"pos": [624, -172],
"size": [315, 82],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 74
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [76],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LoraLoaderModelOnly"
},
"widgets_values": ["flux1-depth-dev-lora.safetensors", 1]
},
{
"id": 23,
"type": "CLIPTextEncode",
"pos": [115, -17],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["a photograph of a shark in the sea"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 3,
"type": "KSampler",
"pos": [1280, 100],
"size": [315, 262],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 76
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 64
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 65
},
{
"name": "latent_image",
"type": "LATENT",
"link": 73
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
91050358797301,
"randomize",
20,
1,
"euler",
"normal",
1
]
},
{
"id": 31,
"type": "UNETLoader",
"pos": [249, -171],
"size": [315, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [74],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-depth-dev.safetensors", "default"]
},
{
"id": 40,
"type": "MarkdownNote",
"pos": [-225, 270],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#canny-and-depth)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[41, 23, 0, 26, 0, "CONDITIONING"],
[60, 32, 0, 8, 1, "VAE"],
[62, 34, 0, 23, 0, "CLIP"],
[63, 34, 0, 7, 0, "CLIP"],
[64, 35, 0, 3, 1, "CONDITIONING"],
[65, 35, 1, 3, 2, "CONDITIONING"],
[67, 26, 0, 35, 0, "CONDITIONING"],
[68, 7, 0, 35, 1, "CONDITIONING"],
[69, 32, 0, 35, 2, "VAE"],
[71, 17, 0, 35, 3, "IMAGE"],
[73, 35, 2, 3, 3, "LATENT"],
[74, 31, 0, 37, 0, "MODEL"],
[76, 37, 0, 3, 0, "MODEL"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.65,
"offset": [724.57, 776.23]
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "clip"
},
{
"name": "flux1-dev-fp8.safetensors",
"url": "https://huggingface.co/Comfy-Org/flux1-dev/resolve/main/flux1-dev-fp8.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
},
{
"name": "flux1-depth-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Depth-dev/resolve/main/flux1-depth-dev.safetensors?download=true",
"directory": "diffusion_models"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "clip"
},
{
"name": "flux1-depth-dev-lora.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Depth-dev-lora/resolve/main/flux1-depth-dev-lora.safetensors?download=true",
"directory": "loras"
}
]
}

View File

@@ -0,0 +1,332 @@
{
"last_node_id": 37,
"last_link_id": 57,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 192],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 45
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [56],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"cute anime girl with massive fluffy fennec ears and a big fluffy tail blonde messy long hair blue eyes wearing a maid outfit with a long black gold leaf pattern dress and a white apron mouth open placing a fancy black forest cake with candles on top of a dinner table of an old dark Victorian mansion lit by candlelight with a bright window to the foggy forest and very expensive stuff everywhere there are paintings on the walls"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1151, 195],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 52
},
{
"name": "vae",
"type": "VAE",
"link": 46
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1375, 194],
"size": [985.3, 1060.38],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [471, 455],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 30,
"type": "CheckpointLoaderSimple",
"pos": [48, 192],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [47],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [45, 54],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [46],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["flux1-dev-fp8.safetensors"]
},
{
"id": 31,
"type": "KSampler",
"pos": [816, 192],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 47
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 57
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 55
},
{
"name": "latent_image",
"type": "LATENT",
"link": 51
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [52],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
972054013131368,
"randomize",
20,
1,
"euler",
"simple",
1
]
},
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [390, 400],
"size": [422.85, 164.31],
"flags": {
"collapsed": true
},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 54,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [55],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 35,
"type": "FluxGuidance",
"pos": [576, 96],
"size": [211.6, 58],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 56
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [3.5]
},
{
"id": 37,
"type": "MarkdownNote",
"pos": [60, 345],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#flux-dev-1)"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 34,
"type": "Note",
"pos": [825, 510],
"size": [282.86, 164.08],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"Note that Flux dev and schnell do not have any negative prompt so CFG should be set to 1.0. Setting CFG to 1.0 means the negative prompt is ignored."
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[9, 8, 0, 9, 0, "IMAGE"],
[45, 30, 1, 6, 0, "CLIP"],
[46, 30, 2, 8, 1, "VAE"],
[47, 30, 0, 31, 0, "MODEL"],
[51, 27, 0, 31, 3, "LATENT"],
[52, 31, 0, 8, 0, "LATENT"],
[54, 30, 1, 33, 0, "CLIP"],
[55, 33, 0, 31, 2, "CONDITIONING"],
[56, 6, 0, 35, 0, "CONDITIONING"],
[57, 35, 0, 31, 1, "CONDITIONING"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.8,
"offset": [350.72, 161.55]
}
},
"version": 0.4,
"models": [
{
"name": "flux1-dev-fp8.safetensors",
"url": "https://huggingface.co/Comfy-Org/flux1-dev/resolve/main/flux1-dev-fp8.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,771 @@
{
"last_node_id": 38,
"last_link_id": 116,
"nodes": [
{
"id": 11,
"type": "DualCLIPLoader",
"pos": [48, 288],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"t5xxl_fp16.safetensors",
"clip_l.safetensors",
"flux",
"default"
]
},
{
"id": 17,
"type": "BasicScheduler",
"pos": [480, 1008],
"size": [315, 106],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 55,
"slot_index": 0
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"shape": 3,
"links": [20]
}
],
"properties": {
"Node name for S&R": "BasicScheduler"
},
"widgets_values": ["simple", 20, 1]
},
{
"id": 16,
"type": "KSamplerSelect",
"pos": [480, 912],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"shape": 3,
"links": [19]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [480, 144],
"size": [317.4, 58],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [3.5],
"color": "#233",
"bgcolor": "#355"
},
{
"id": 22,
"type": "BasicGuider",
"pos": [576, 48],
"size": [222.35, 46],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 54,
"slot_index": 0
},
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 42,
"slot_index": 1
}
],
"outputs": [
{
"name": "GUIDER",
"type": "GUIDER",
"shape": 3,
"links": [30],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "BasicGuider"
},
"widgets_values": []
},
{
"id": 13,
"type": "SamplerCustomAdvanced",
"pos": [864, 192],
"size": [272.36, 124.54],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "noise",
"type": "NOISE",
"link": 37,
"slot_index": 0
},
{
"name": "guider",
"type": "GUIDER",
"link": 30,
"slot_index": 1
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 19,
"slot_index": 2
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 20,
"slot_index": 3
},
{
"name": "latent_image",
"type": "LATENT",
"link": 116,
"slot_index": 4
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"shape": 3,
"links": [24],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustomAdvanced"
},
"widgets_values": []
},
{
"id": 25,
"type": "RandomNoise",
"pos": [480, 768],
"size": [315, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "NOISE",
"type": "NOISE",
"shape": 3,
"links": [37]
}
],
"properties": {
"Node name for S&R": "RandomNoise"
},
"widgets_values": [219670278747233, "randomize"],
"color": "#2a363b",
"bgcolor": "#3f5159"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [866, 367],
"size": [210, 46],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 24
},
{
"name": "vae",
"type": "VAE",
"link": 12
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 240],
"size": [422.85, 164.31],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 10
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"cute anime girl with massive fluffy fennec ears and a big fluffy tail blonde messy long hair blue eyes wearing a maid outfit with a long black gold leaf pattern dress and a white apron mouth open holding a fancy black forest cake with candles on top in the kitchen of an old dark Victorian mansion lit by candlelight with a bright window to the foggy forest and very expensive stuff everywhere"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 30,
"type": "ModelSamplingFlux",
"pos": [480, 1152],
"size": [315, 130],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 56,
"slot_index": 0
},
{
"name": "width",
"type": "INT",
"widget": {
"name": "width"
},
"link": 115,
"slot_index": 1
},
{
"name": "height",
"type": "INT",
"widget": {
"name": "height"
},
"link": 114,
"slot_index": 2
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [54, 55],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ModelSamplingFlux"
},
"widgets_values": [1.15, 0.5, 1024, 1024]
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [480, 624],
"size": [315, 106],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "width",
"type": "INT",
"widget": {
"name": "width"
},
"link": 112
},
{
"name": "height",
"type": "INT",
"widget": {
"name": "height"
},
"link": 113
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [116],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 34,
"type": "PrimitiveNode",
"pos": [432, 480],
"size": [210, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "width"
},
"links": [112, 115],
"slot_index": 0
}
],
"title": "width",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [1024, "fixed"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 35,
"type": "PrimitiveNode",
"pos": [672, 480],
"size": [210, 82],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "height"
},
"links": [113, 114],
"slot_index": 0
}
],
"title": "height",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [1024, "fixed"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 12,
"type": "UNETLoader",
"pos": [48, 144],
"size": [315, 82],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [56],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-dev-fp8.safetensors", "default"],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 9,
"type": "SaveImage",
"pos": [1155, 196],
"size": [985.3, 1060.38],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 37,
"type": "Note",
"pos": [480, 1344],
"size": [315.0, 117.98],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"The reference sampling implementation auto adjusts the shift value based on the resolution, if you don't want this you can just bypass (CTRL-B) this ModelSamplingFlux node.\n"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 10,
"type": "VAELoader",
"pos": [48, 432],
"size": [311.82, 60.43],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 28,
"type": "Note",
"pos": [48, 576],
"size": [336, 288],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"If you get an error in any of the nodes above make sure the files are in the correct directories.\n\nSee the top of the examples page for the links : https://comfyanonymous.github.io/ComfyUI_examples/flux/\n\nflux1-dev-fp8.safetensors goes in: ComfyUI/models/unet/\n\nt5xxl_fp16.safetensors and clip_l.safetensors go in: ComfyUI/models/clip/\n\nae.safetensors goes in: ComfyUI/models/vae/\n\n\nTip: You can set the weight_dtype above to one of the fp8 types if you have memory issues."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 38,
"type": "MarkdownNote",
"pos": [45, 930],
"size": [225, 60],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#flux-dev-1)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[9, 8, 0, 9, 0, "IMAGE"],
[10, 11, 0, 6, 0, "CLIP"],
[12, 10, 0, 8, 1, "VAE"],
[19, 16, 0, 13, 2, "SAMPLER"],
[20, 17, 0, 13, 3, "SIGMAS"],
[24, 13, 0, 8, 0, "LATENT"],
[30, 22, 0, 13, 1, "GUIDER"],
[37, 25, 0, 13, 0, "NOISE"],
[41, 6, 0, 26, 0, "CONDITIONING"],
[42, 26, 0, 22, 1, "CONDITIONING"],
[54, 30, 0, 22, 0, "MODEL"],
[55, 30, 0, 17, 0, "MODEL"],
[56, 12, 0, 30, 0, "MODEL"],
[112, 34, 0, 27, 0, "INT"],
[113, 35, 0, 27, 1, "INT"],
[114, 35, 0, 30, 2, "INT"],
[115, 34, 0, 30, 1, "INT"],
[116, 27, 0, 13, 4, "LATENT"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [-0.18, 2.29]
},
"groupNodes": {
"EmptyLatentImage": {
"nodes": [
{
"type": "PrimitiveNode",
"pos": [432, 480],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 6,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [],
"widget": {
"name": "height"
},
"slot_index": 0
}
],
"title": "height",
"properties": {
"Run widget replace on values": false
},
"color": "#323",
"bgcolor": "#535",
"index": 0
},
{
"type": "PrimitiveNode",
"pos": [672, 480],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 7,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [],
"slot_index": 0,
"widget": {
"name": "width"
}
}
],
"title": "width",
"properties": {
"Run widget replace on values": false
},
"color": "#323",
"bgcolor": "#535",
"index": 1
},
{
"type": "EmptySD3LatentImage",
"pos": [480, 624],
"size": {
"0": 315,
"1": 106
},
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "width",
"type": "INT",
"link": null,
"widget": {
"name": "width"
}
},
{
"name": "height",
"type": "INT",
"link": null,
"widget": {
"name": "height"
}
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1],
"index": 2
}
],
"links": [
[1, 0, 2, 0, 34, "INT"],
[0, 0, 2, 1, 35, "INT"]
],
"external": [
[0, 0, "INT"],
[1, 0, "INT"],
[2, 0, "LATENT"]
],
"config": {
"0": {
"output": {
"0": {
"name": "height"
}
},
"input": {
"value": {
"visible": true
}
}
},
"1": {
"output": {
"0": {
"name": "width"
}
},
"input": {
"value": {
"visible": true
}
}
},
"2": {
"input": {
"width": {
"visible": false
},
"height": {
"visible": false
}
}
}
}
}
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "clip"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "clip"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
},
{
"name": "flux1-dev-fp8.safetensors",
"url": "https://huggingface.co/Comfy-Org/flux1-dev/resolve/main/flux1-dev-fp8.safetensors?download=true",
"directory": "diffusion_models"
}
]
}

View File

@@ -0,0 +1,458 @@
{
"last_node_id": 45,
"last_link_id": 100,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [307, 282],
"size": [425.28, 180.61],
"flags": {
"collapsed": true
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 63
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [81],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 32,
"type": "VAELoader",
"pos": [1352, 422],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [60, 82],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [593, 44],
"size": [317.4, 58],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [80],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [30]
},
{
"id": 34,
"type": "DualCLIPLoader",
"pos": [-237, 79],
"size": [315, 106],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [62, 63]
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp16.safetensors",
"flux",
"default"
]
},
{
"id": 39,
"type": "DifferentialDiffusion",
"pos": [1001, -68],
"size": [277.2, 26],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 85
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [86],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DifferentialDiffusion"
},
"widgets_values": []
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1620, 98],
"size": [210, 46],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 60
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [95],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 38,
"type": "InpaintModelConditioning",
"pos": [952, 78],
"size": [302.4, 138],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 80
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 81
},
{
"name": "vae",
"type": "VAE",
"link": 82
},
{
"name": "pixels",
"type": "IMAGE",
"link": 99
},
{
"name": "mask",
"type": "MASK",
"link": 100
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [77],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [78],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [88],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "InpaintModelConditioning"
},
"widgets_values": [false]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1877, 101],
"size": [828.95, 893.85],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 95
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [1280, 100],
"size": [315, 262],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 86
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 77
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 78
},
{
"name": "latent_image",
"type": "LATENT",
"link": 88
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
656821733471329,
"randomize",
20,
1,
"euler",
"normal",
1
]
},
{
"id": 31,
"type": "UNETLoader",
"pos": [602, -120],
"size": [315, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [85],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-fill-dev.safetensors", "default"]
},
{
"id": 17,
"type": "LoadImage",
"pos": [587, 312],
"size": [315, 314.0],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [99],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": [100],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["yosemite_inpaint_example.png", "image"]
},
{
"id": 23,
"type": "CLIPTextEncode",
"pos": [144, -7],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"anime girl with massive fennec ears blonde hair blue eyes wearing a pink shirt"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 45,
"type": "MarkdownNote",
"pos": [-225, 255],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#fill-inpainting-model)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[41, 23, 0, 26, 0, "CONDITIONING"],
[60, 32, 0, 8, 1, "VAE"],
[62, 34, 0, 23, 0, "CLIP"],
[63, 34, 0, 7, 0, "CLIP"],
[77, 38, 0, 3, 1, "CONDITIONING"],
[78, 38, 1, 3, 2, "CONDITIONING"],
[80, 26, 0, 38, 0, "CONDITIONING"],
[81, 7, 0, 38, 1, "CONDITIONING"],
[82, 32, 0, 38, 2, "VAE"],
[85, 31, 0, 39, 0, "MODEL"],
[86, 39, 0, 3, 0, "MODEL"],
[88, 38, 2, 3, 3, "LATENT"],
[95, 8, 0, 9, 0, "IMAGE"],
[99, 17, 0, 38, 3, "IMAGE"],
[100, 17, 1, 38, 4, "MASK"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.21,
"offset": [566.62, 207.73]
}
},
"version": 0.4,
"models": [
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "clip"
},
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "clip"
},
{
"name": "flux1-fill-dev.safetensors",
      "url": "https://huggingface.co/black-forest-labs/FLUX.1-Fill-dev/resolve/main/flux1-fill-dev.safetensors?download=true",
"directory": "diffusion_models"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
}
]
}

View File

@@ -0,0 +1,491 @@
{
"last_node_id": 45,
"last_link_id": 98,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [307, 282],
"size": [425.28, 180.61],
"flags": {
"collapsed": true
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 63
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [81],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 32,
"type": "VAELoader",
"pos": [1352, 422],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [60, 82],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [593, 44],
"size": [317.4, 58],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [80],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [30]
},
{
"id": 34,
"type": "DualCLIPLoader",
"pos": [-237, 79],
"size": [315, 106],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [62, 63]
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"t5xxl_fp16.safetensors",
"flux",
"default"
]
},
{
"id": 39,
"type": "DifferentialDiffusion",
"pos": [1001, -68],
"size": [277.2, 26],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 85
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [86],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DifferentialDiffusion"
},
"widgets_values": []
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1620, 98],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 60
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [95],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 38,
"type": "InpaintModelConditioning",
"pos": [952, 78],
"size": [302.4, 138],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 80
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 81
},
{
"name": "vae",
"type": "VAE",
"link": 82
},
{
"name": "pixels",
"type": "IMAGE",
"link": 97
},
{
"name": "mask",
"type": "MASK",
"link": 98
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [77],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [78],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [88],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "InpaintModelConditioning"
},
"widgets_values": [false]
},
{
"id": 44,
"type": "ImagePadForOutpaint",
"pos": [415, 359],
"size": [315, 174],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 96
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [97],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": [98],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "ImagePadForOutpaint"
},
"widgets_values": [400, 0, 400, 400, 24]
},
{
"id": 23,
"type": "CLIPTextEncode",
"pos": [144, -7],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 62
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["beautiful scenery"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 9,
"type": "SaveImage",
"pos": [1877, 101],
"size": [828.95, 893.85],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 95
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [1280, 100],
"size": [315, 262],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 86
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 77
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 78
},
{
"name": "latent_image",
"type": "LATENT",
"link": 88
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
164211176398261,
"randomize",
20,
1,
"euler",
"normal",
1
]
},
{
"id": 17,
"type": "LoadImage",
"pos": [23, 376],
"size": [315, 314.0],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [96],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": [],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sd3_controlnet_example.png", "image"]
},
{
"id": 31,
"type": "UNETLoader",
"pos": [602, -120],
"size": [315, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [85],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-fill-dev.safetensors", "default"]
},
{
"id": 45,
"type": "MarkdownNote",
"pos": [-225, 255],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#fill-inpainting-model)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[41, 23, 0, 26, 0, "CONDITIONING"],
[60, 32, 0, 8, 1, "VAE"],
[62, 34, 0, 23, 0, "CLIP"],
[63, 34, 0, 7, 0, "CLIP"],
[77, 38, 0, 3, 1, "CONDITIONING"],
[78, 38, 1, 3, 2, "CONDITIONING"],
[80, 26, 0, 38, 0, "CONDITIONING"],
[81, 7, 0, 38, 1, "CONDITIONING"],
[82, 32, 0, 38, 2, "VAE"],
[85, 31, 0, 39, 0, "MODEL"],
[86, 39, 0, 3, 0, "MODEL"],
[88, 38, 2, 3, 3, "LATENT"],
[95, 8, 0, 9, 0, "IMAGE"],
[96, 17, 0, 44, 0, "IMAGE"],
[97, 44, 0, 38, 3, "IMAGE"],
[98, 44, 1, 38, 4, "MASK"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1,
"offset": [240.64, 211.87]
}
},
"version": 0.4,
"models": [
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "clip"
},
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "clip"
},
{
"name": "flux1-fill-dev.safetensors",
      "url": "https://huggingface.co/black-forest-labs/FLUX.1-Fill-dev/resolve/main/flux1-fill-dev.safetensors?download=true",
"directory": "diffusion_models"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
}
]
}

View File

@@ -0,0 +1,956 @@
{
"last_node_id": 44,
"last_link_id": 123,
"nodes": [
{
"id": 11,
"type": "DualCLIPLoader",
"pos": [48, 288],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"t5xxl_fp16.safetensors",
"clip_l.safetensors",
"flux",
"default"
]
},
{
"id": 17,
"type": "BasicScheduler",
"pos": [480, 1008],
"size": [315, 106],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 55,
"slot_index": 0
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"shape": 3,
"links": [20]
}
],
"properties": {
"Node name for S&R": "BasicScheduler"
},
"widgets_values": ["simple", 20, 1]
},
{
"id": 16,
"type": "KSamplerSelect",
"pos": [480, 912],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"shape": 3,
"links": [19]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [480, 144],
"size": [317.4, 58],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 41
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [122],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [3.5],
"color": "#233",
"bgcolor": "#355"
},
{
"id": 13,
"type": "SamplerCustomAdvanced",
"pos": [864, 192],
"size": [272.36, 124.54],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "noise",
"type": "NOISE",
"link": 37,
"slot_index": 0
},
{
"name": "guider",
"type": "GUIDER",
"link": 30,
"slot_index": 1
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 19,
"slot_index": 2
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 20,
"slot_index": 3
},
{
"name": "latent_image",
"type": "LATENT",
"link": 116,
"slot_index": 4
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"shape": 3,
"links": [24],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustomAdvanced"
},
"widgets_values": []
},
{
"id": 25,
"type": "RandomNoise",
"pos": [480, 768],
"size": [315, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "NOISE",
"type": "NOISE",
"shape": 3,
"links": [37]
}
],
"properties": {
"Node name for S&R": "RandomNoise"
},
"widgets_values": [958831004022715, "randomize"],
"color": "#2a363b",
"bgcolor": "#3f5159"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [866, 367],
"size": [210, 46],
"flags": {},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 24
},
{
"name": "vae",
"type": "VAE",
"link": 12
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 30,
"type": "ModelSamplingFlux",
"pos": [480, 1152],
"size": [315, 130],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 56,
"slot_index": 0
},
{
"name": "width",
"type": "INT",
"widget": {
"name": "width"
},
"link": 115,
"slot_index": 1
},
{
"name": "height",
"type": "INT",
"widget": {
"name": "height"
},
"link": 114,
"slot_index": 2
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [54, 55],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ModelSamplingFlux"
},
"widgets_values": [1.15, 0.5, 1024, 1024]
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [480, 624],
"size": [315, 106],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "width",
"type": "INT",
"widget": {
"name": "width"
},
"link": 112
},
{
"name": "height",
"type": "INT",
"widget": {
"name": "height"
},
"link": 113
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [116],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 34,
"type": "PrimitiveNode",
"pos": [432, 480],
"size": [210, 82],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "width"
},
"links": [112, 115],
"slot_index": 0
}
],
"title": "width",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [1024, "fixed"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 35,
"type": "PrimitiveNode",
"pos": [672, 480],
"size": [210, 82],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "height"
},
"links": [113, 114],
"slot_index": 0
}
],
"title": "height",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [1024, "fixed"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 12,
"type": "UNETLoader",
"pos": [48, 144],
"size": [315, 82],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [56],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["flux1-dev.safetensors", "default"],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 9,
"type": "SaveImage",
"pos": [1155, 196],
"size": [985.3, 1060.38],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 37,
"type": "Note",
"pos": [480, 1344],
"size": [315.0, 117.98],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"The reference sampling implementation auto adjusts the shift value based on the resolution, if you don't want this you can just bypass (CTRL-B) this ModelSamplingFlux node.\n"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 10,
"type": "VAELoader",
"pos": [48, 432],
"size": [311.82, 60.43],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["ae.safetensors"]
},
{
"id": 28,
"type": "Note",
"pos": [48, 576],
"size": [336, 288],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"If you get an error in any of the nodes above make sure the files are in the correct directories.\n\nSee the top of the examples page for the links : https://comfyanonymous.github.io/ComfyUI_examples/flux/\n\nflux1-dev.safetensors goes in: ComfyUI/models/unet/\n\nt5xxl_fp16.safetensors and clip_l.safetensors go in: ComfyUI/models/clip/\n\nae.safetensors goes in: ComfyUI/models/vae/\n\n\nTip: You can set the weight_dtype above to one of the fp8 types if you have memory issues."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 39,
"type": "CLIPVisionEncode",
"pos": [420, -300],
"size": [290, 78],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 117
},
{
"name": "image",
"type": "IMAGE",
"link": 118
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [120],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 40,
"type": "LoadImage",
"pos": [60, -300],
"size": [315, 314],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [118]
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sd3_controlnet_example.png", "image"]
},
{
"id": 42,
"type": "StyleModelLoader",
"pos": [400, -180],
"size": [340, 60],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "STYLE_MODEL",
"type": "STYLE_MODEL",
"links": [119]
}
],
"properties": {
"Node name for S&R": "StyleModelLoader"
},
"widgets_values": ["flux1-redux-dev.safetensors"]
},
{
"id": 38,
"type": "CLIPVisionLoader",
"pos": [60, -410],
"size": [370, 60],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"links": [117],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionLoader"
},
"widgets_values": ["sigclip_vision_patch14_384.safetensors"]
},
{
"id": 41,
"type": "StyleModelApply",
"pos": [760, -300],
"size": [320, 122],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 122
},
{
"name": "style_model",
"type": "STYLE_MODEL",
"link": 119
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"shape": 7,
"link": 120
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [123],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "StyleModelApply"
},
"widgets_values": [1, "multiply"]
},
{
"id": 22,
"type": "BasicGuider",
"pos": [960, 66],
"size": [222.35, 46],
"flags": {},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 54,
"slot_index": 0
},
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 123,
"slot_index": 1
}
],
"outputs": [
{
"name": "GUIDER",
"type": "GUIDER",
"shape": 3,
"links": [30],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "BasicGuider"
},
"widgets_values": []
},
{
"id": 43,
"type": "Note",
"pos": [1130, -440],
"size": [345.9, 182.31],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"The redux model lets you prompt with images. It can be used with any Flux1 dev or schnell model workflow.\n\nYou can chain multiple \"Apply Style Model\" nodes if you want to mix multiple images together."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 240],
"size": [422.85, 164.31],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 10
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [41],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["cute anime girl with massive fluffy fennec ears"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 44,
"type": "MarkdownNote",
"pos": [60, 915],
"size": [225, 60],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#redux)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[9, 8, 0, 9, 0, "IMAGE"],
[10, 11, 0, 6, 0, "CLIP"],
[12, 10, 0, 8, 1, "VAE"],
[19, 16, 0, 13, 2, "SAMPLER"],
[20, 17, 0, 13, 3, "SIGMAS"],
[24, 13, 0, 8, 0, "LATENT"],
[30, 22, 0, 13, 1, "GUIDER"],
[37, 25, 0, 13, 0, "NOISE"],
[41, 6, 0, 26, 0, "CONDITIONING"],
[54, 30, 0, 22, 0, "MODEL"],
[55, 30, 0, 17, 0, "MODEL"],
[56, 12, 0, 30, 0, "MODEL"],
[112, 34, 0, 27, 0, "INT"],
[113, 35, 0, 27, 1, "INT"],
[114, 35, 0, 30, 2, "INT"],
[115, 34, 0, 30, 1, "INT"],
[116, 27, 0, 13, 4, "LATENT"],
[117, 38, 0, 39, 0, "CLIP_VISION"],
[118, 40, 0, 39, 1, "IMAGE"],
[119, 42, 0, 41, 1, "STYLE_MODEL"],
[120, 39, 0, 41, 2, "CLIP_VISION_OUTPUT"],
[122, 26, 0, 41, 0, "CONDITIONING"],
[123, 41, 0, 22, 1, "CONDITIONING"]
],
"groups": [
{
"id": 1,
"title": "Redux Model",
"bounding": [45, -480, 1040, 507.6],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.9,
"offset": [139.8, 57.78]
},
"groupNodes": {
"EmptyLatentImage": {
"nodes": [
{
"type": "PrimitiveNode",
"pos": [432, 480],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 6,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [],
"widget": {
"name": "height"
},
"slot_index": 0
}
],
"title": "height",
"properties": {
"Run widget replace on values": false
},
"color": "#323",
"bgcolor": "#535",
"index": 0
},
{
"type": "PrimitiveNode",
"pos": [672, 480],
"size": {
"0": 210,
"1": 82
},
"flags": {},
"order": 7,
"mode": 0,
"outputs": [
{
"name": "INT",
"type": "INT",
"links": [],
"slot_index": 0,
"widget": {
"name": "width"
}
}
],
"title": "width",
"properties": {
"Run widget replace on values": false
},
"color": "#323",
"bgcolor": "#535",
"index": 1
},
{
"type": "EmptySD3LatentImage",
"pos": [480, 624],
"size": {
"0": 315,
"1": 106
},
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "width",
"type": "INT",
"link": null,
"widget": {
"name": "width"
}
},
{
"name": "height",
"type": "INT",
"link": null,
"widget": {
"name": "height"
}
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1],
"index": 2
}
],
"links": [
[1, 0, 2, 0, 34, "INT"],
[0, 0, 2, 1, 35, "INT"]
],
"external": [
[0, 0, "INT"],
[1, 0, "INT"],
[2, 0, "LATENT"]
],
"config": {
"0": {
"output": {
"0": {
"name": "height"
}
},
"input": {
"value": {
"visible": true
}
}
},
"1": {
"output": {
"0": {
"name": "width"
}
},
"input": {
"value": {
"visible": true
}
}
},
"2": {
"input": {
"width": {
"visible": false
},
"height": {
"visible": false
}
}
}
}
}
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "clip"
},
{
"name": "flux1-dev-fp8.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/flux1-dev.safetensors?download=true",
"directory": "diffusion_models"
},
{
"name": "sigclip_vision_patch14_384.safetensors",
"url": "https://huggingface.co/Comfy-Org/sigclip_vision_384/resolve/main/sigclip_vision_patch14_384.safetensors?download=true",
"directory": "clip_vision"
},
{
"name": "flux1-dev-fp8.safetensors",
"url": "https://huggingface.co/Comfy-Org/flux1-dev/resolve/main/flux1-dev-fp8.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "ae.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-schnell/resolve/main/ae.safetensors?download=true",
"directory": "vae"
},
{
"name": "flux1-redux-dev.safetensors",
"url": "https://huggingface.co/black-forest-labs/FLUX.1-Redux-dev/resolve/main/flux1-redux-dev.safetensors",
"directory": "style_models"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "clip"
}
]
}

View File

@@ -1,420 +1,302 @@
{
"last_node_id": 36,
"last_link_id": 58,
"nodes": [
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [
390,
400
],
"size": {
"0": 422.84503173828125,
"1": 164.31304931640625
},
"flags": {
"collapsed": true
},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 54,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
55
],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
""
],
"color": "#322",
"bgcolor": "#533"
"last_node_id": 37,
"last_link_id": 58,
"nodes": [
{
"id": 33,
"type": "CLIPTextEncode",
"pos": [390, 400],
"size": [422.85, 164.31],
"flags": {
"collapsed": true
},
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [
471,
455
],
"size": {
"0": 315,
"1": 106
},
"flags": {},
"order": 0,
"mode": 0,
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
51
],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [
1024,
1024,
1
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [
1151,
195
],
"size": {
"0": 210,
"1": 46
},
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 52
},
{
"name": "vae",
"type": "VAE",
"link": 46
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
9
],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 54,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [55],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
{
"id": 9,
"type": "SaveImage",
"pos": [
1375,
194
],
"size": {
"0": 985.3012084960938,
"1": 1060.3828125
},
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"properties": {},
"widgets_values": [
"ComfyUI"
]
},
{
"id": 31,
"type": "KSampler",
"pos": [
816,
192
],
"size": {
"0": 315,
"1": 262
},
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 47
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 58
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 55
},
{
"name": "latent_image",
"type": "LATENT",
"link": 51
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
52
],
"shape": 3,
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
173805153958730,
"randomize",
4,
1,
"euler",
"simple",
1
]
},
{
"id": 30,
"type": "CheckpointLoaderSimple",
"pos": [
48,
192
],
"size": {
"0": 315,
"1": 98
},
"flags": {},
"order": 1,
"mode": 0,
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
47
],
"shape": 3,
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [
45,
54
],
"shape": 3,
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [
46
],
"shape": 3,
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": [
"flux1-schnell-fp8.safetensors"
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [
384,
192
],
"size": {
"0": 422.84503173828125,
"1": 164.31304931640625
},
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 45
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [
58
],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"a bottle with a beautiful rainbow galaxy inside it on top of a wooden table in the middle of a modern kitchen beside a plate of vegetables and mushrooms and a wine glasse that contains a planet earth with a plate with a half eaten apple pie on it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 34,
"type": "Note",
"pos": [
831,
501
],
"size": {
"0": 282.8617858886719,
"1": 164.08004760742188
},
"flags": {},
"order": 2,
"mode": 0,
"properties": {
"text": ""
},
"widgets_values": [
"Note that Flux dev and schnell do not have any negative prompt so CFG should be set to 1.0. Setting CFG to 1.0 means the negative prompt is ignored.\n\nThe schnell model is a distilled model that can generate a good image with only 4 steps."
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[
9,
8,
0,
9,
0,
"IMAGE"
],
[
45,
30,
1,
6,
0,
"CLIP"
],
[
46,
30,
2,
8,
1,
"VAE"
],
[
47,
30,
0,
31,
0,
"MODEL"
],
[
51,
27,
0,
31,
3,
"LATENT"
],
[
52,
31,
0,
8,
0,
"LATENT"
],
[
54,
30,
1,
33,
0,
"CLIP"
],
[
55,
33,
0,
31,
2,
"CONDITIONING"
],
[
58,
6,
0,
31,
1,
"CONDITIONING"
]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [
0.6836674124529055,
1.8290357611967831
]
}
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
"models": [
{
"name": "flux1-schnell-fp8.safetensors",
"url": "https://huggingface.co/Comfy-Org/flux1-schnell/resolve/main/flux1-schnell-fp8.safetensors?download=true",
"directory": "checkpoints"
}
],
"version": 0.4
}
{
"id": 27,
"type": "EmptySD3LatentImage",
"pos": [471, 455],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1151, 195],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 52
},
{
"name": "vae",
"type": "VAE",
"link": 46
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1375, 194],
"size": [985.3, 1060.38],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 31,
"type": "KSampler",
"pos": [816, 192],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 47
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 58
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 55
},
{
"name": "latent_image",
"type": "LATENT",
"link": 51
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [52],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
173805153958730,
"randomize",
4,
1,
"euler",
"simple",
1
]
},
{
"id": 30,
"type": "CheckpointLoaderSimple",
"pos": [48, 192],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [47],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [45, 54],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [46],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["flux1-schnell-fp8.safetensors"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 192],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 45
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [58],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"a bottle with a beautiful rainbow galaxy inside it on top of a wooden table in the middle of a modern kitchen beside a plate of vegetables and mushrooms and a wine glasse that contains a planet earth with a plate with a half eaten apple pie on it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 34,
"type": "Note",
"pos": [831, 501],
"size": [282.86, 164.08],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"Note that Flux dev and schnell do not have any negative prompt so CFG should be set to 1.0. Setting CFG to 1.0 means the negative prompt is ignored.\n\nThe schnell model is a distilled model that can generate a good image with only 4 steps."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 37,
"type": "MarkdownNote",
"pos": [45, 345],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/flux/#flux-schnell-1)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[9, 8, 0, 9, 0, "IMAGE"],
[45, 30, 1, 6, 0, "CLIP"],
[46, 30, 2, 8, 1, "VAE"],
[47, 30, 0, 31, 0, "MODEL"],
[51, 27, 0, 31, 3, "LATENT"],
[52, 31, 0, 8, 0, "LATENT"],
[54, 30, 1, 33, 0, "CLIP"],
[55, 33, 0, 31, 2, "CONDITIONING"],
[58, 6, 0, 31, 1, "CONDITIONING"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [0.68, 1.83]
}
},
"version": 0.4,
"models": [
{
"name": "flux1-schnell-fp8.safetensors",
"url": "https://huggingface.co/Comfy-Org/flux1-schnell/resolve/main/flux1-schnell-fp8.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,376 @@
{
"last_node_id": 28,
"last_link_id": 79,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 1
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 77
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 57
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1023216319780679,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 24,
"type": "CLIPTextEncode",
"pos": [-260, -340],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [69],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"photograph scenery landscape, snow beautiful scenery mountain, glass bottle; purple galaxy bottle; sun"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [300, 230],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1495, 167],
"size": [493.63, 561.54],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["gligen/testing"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [410, 460],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 27,
"type": "GLIGENTextBoxApply",
"pos": [770, -340],
"size": [437.22, 382.68],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "conditioning_to",
"type": "CONDITIONING",
"link": 78
},
{
"name": "clip",
"type": "CLIP",
"link": 74
},
{
"name": "gligen_textbox_model",
"type": "GLIGEN",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [77],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "GLIGENTextBoxApply"
},
"widgets_values": ["sun", 144, 144, 416, 16]
},
{
"id": 21,
"type": "GLIGENTextBoxApply",
"pos": [270, -340],
"size": [437.22, 382.68],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "conditioning_to",
"type": "CONDITIONING",
"link": 69
},
{
"name": "clip",
"type": "CLIP",
"link": 53
},
{
"name": "gligen_textbox_model",
"type": "GLIGEN",
"link": 54
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [65, 78],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "GLIGENTextBoxApply"
},
"widgets_values": ["purple galaxy bottle", 192, 304, 176, 272]
},
{
"id": 10,
"type": "GLIGENLoader",
"pos": [-230, -70],
"size": [390, 60],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "GLIGEN",
"type": "GLIGEN",
"links": [54, 75],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "GLIGENLoader"
},
"widgets_values": ["gligen_sd14_textbox_pruned.safetensors"]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-220, 130],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [1],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [5, 53, 67, 74],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [79],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 28,
"type": "MarkdownNote",
"pos": [-210, 285],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/gligen/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[1, 4, 0, 3, 0, "MODEL"],
[2, 5, 0, 3, 3, "LATENT"],
[5, 4, 1, 7, 0, "CLIP"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[53, 4, 1, 21, 1, "CLIP"],
[54, 10, 0, 21, 2, "GLIGEN"],
[57, 7, 0, 3, 2, "CONDITIONING"],
[67, 4, 1, 24, 0, "CLIP"],
[69, 24, 0, 21, 0, "CONDITIONING"],
[74, 4, 1, 27, 1, "CLIP"],
[75, 10, 0, 27, 2, "GLIGEN"],
[77, 27, 0, 3, 1, "CONDITIONING"],
[78, 21, 0, 27, 0, "CONDITIONING"],
[79, 4, 2, 8, 1, "VAE"]
],
"groups": [
{
"id": 1,
"title": "Base Prompt",
"bounding": [-315, -465, 518, 302],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "GLIGEN (for best results the elements should match some elements in the base prompt)",
"bounding": [255, -465, 980, 529],
"color": "#A88",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.8,
"offset": [433.59, 361.81]
}
},
"version": 0.4,
"models": [
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "gligen_sd14_textbox_pruned.safetensors",
"url": "https://huggingface.co/comfyanonymous/GLIGEN_pruned_safetensors/resolve/main/gligen_sd14_textbox_pruned.safetensors?download=true",
"directory": "gligen"
}
]
}

View File

@@ -0,0 +1,607 @@
{
"last_node_id": 26,
"last_link_id": 35,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1185.5, 412.07],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 30
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 13,
"type": "VAEDecode",
"pos": [3221.22, 232.38],
"size": [210, 46],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 15
},
{
"name": "vae",
"type": "VAE",
"link": 33
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [17],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [81.78, 142.34],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 28
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4, 12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece HDR victorian portrait painting of woman, blonde hair, mountain nature, blue sky\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [84.78, 352.34],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 29
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6, 13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands, text, watermark\n"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [142.78, 571.34],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [552.78, 143.34],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 34
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 20],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
251225068430076,
"randomize",
12,
8,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 21,
"type": "VAEDecode",
"pos": [988.18, 29.56],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 20
},
{
"name": "vae",
"type": "VAE",
"link": 32
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 20,
"type": "VAEEncode",
"pos": [2459.1, 103.02],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 26
},
{
"name": "vae",
"type": "VAE",
"link": 31
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [18],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEEncode"
},
"widgets_values": []
},
{
"id": 22,
"type": "ImageUpscaleWithModel",
"pos": [1631.06, 3.66],
"size": [226.8, 46],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "upscale_model",
"type": "UPSCALE_MODEL",
"link": 24
},
{
"name": "image",
"type": "IMAGE",
"link": 23
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [27],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageUpscaleWithModel"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1446, 411],
"size": [611.26, 628.6],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 24,
"type": "ImageScale",
"pos": [1931, 10],
"size": [315, 130],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 27
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [26],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageScale"
},
"widgets_values": ["bilinear", 1536, 1536, "disabled"]
},
{
"id": 12,
"type": "SaveImage",
"pos": [3463, 230],
"size": [868.01, 936.97],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 17
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 11,
"type": "KSampler",
"pos": [2811.96, 176.22],
"size": [315, 262],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 35,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 12,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 13,
"slot_index": 2
},
{
"name": "latent_image",
"type": "LATENT",
"link": 18,
"slot_index": 3
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [15],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
783745448521451,
"randomize",
14,
8,
"uni_pc_bh2",
"normal",
0.5
]
},
{
"id": 25,
"type": "CheckpointLoaderSimple",
"pos": [-262, 284],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [34, 35],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [28, 29],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [30, 31, 32, 33],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v2-1_768-ema-pruned.safetensors"]
},
{
"id": 23,
"type": "UpscaleModelLoader",
"pos": [1288.06, -39.34],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "UPSCALE_MODEL",
"type": "UPSCALE_MODEL",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UpscaleModelLoader"
},
"widgets_values": ["RealESRGAN_x4plus.pth"]
},
{
"id": 26,
"type": "MarkdownNote",
"pos": [-300, 750],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/#non-latent-upscaling)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[12, 6, 0, 11, 1, "CONDITIONING"],
[13, 7, 0, 11, 2, "CONDITIONING"],
[15, 11, 0, 13, 0, "LATENT"],
[17, 13, 0, 12, 0, "IMAGE"],
[18, 20, 0, 11, 3, "LATENT"],
[20, 3, 0, 21, 0, "LATENT"],
[23, 21, 0, 22, 1, "IMAGE"],
[24, 23, 0, 22, 0, "UPSCALE_MODEL"],
[26, 24, 0, 20, 0, "IMAGE"],
[27, 22, 0, 24, 0, "IMAGE"],
[28, 25, 1, 6, 0, "CLIP"],
[29, 25, 1, 7, 0, "CLIP"],
[30, 25, 2, 8, 1, "VAE"],
[31, 25, 2, 20, 1, "VAE"],
[32, 25, 2, 21, 1, "VAE"],
[33, 25, 2, 13, 1, "VAE"],
[34, 25, 0, 3, 0, "MODEL"],
[35, 25, 0, 11, 0, "MODEL"]
],
"groups": [
{
"id": 1,
"title": "Txt2Img",
"bounding": [-300, 0, 1211, 708],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Save Intermediate Image",
"bounding": [1170, 330, 516, 196],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Second pass",
"bounding": [2775, 90, 379, 429],
"color": "#444",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Save Final Image",
"bounding": [3210, 135, 483, 199],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 5,
"title": "ESRGAN upscale with 4x model",
"bounding": [1260, -120, 578, 184],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 6,
"title": "Decode to Pixel space",
"bounding": [960, -45, 285, 142],
"color": "#A88",
"font_size": 24,
"flags": {}
},
{
"id": 7,
"title": "Encode back to latent space",
"bounding": [2400, 15, 312, 157],
"color": "#A88",
"font_size": 24,
"flags": {}
},
{
"id": 8,
"title": "Downscale image to a more reasonable size",
"bounding": [1845, -75, 483, 245],
"color": "#8AA",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.71,
"offset": [448.42, 482.51]
}
},
"version": 0.4,
"models": [
{
"name": "v2-1_768-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "RealESRGAN_x4plus.pth",
"url": "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.0/RealESRGAN_x4plus.pth",
"directory": "upscale_models"
}
]
}

View File

@@ -0,0 +1,442 @@
{
"last_node_id": 17,
"last_link_id": 23,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1235.72, 577.19],
"size": [210, 46],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 21
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "LatentUpscale",
"pos": [1238, 170],
"size": [315, 130],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 10
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [14]
}
],
"properties": {
"Node name for S&R": "LatentUpscale"
},
"widgets_values": ["nearest-exact", 1152, 1152, "disabled"]
},
{
"id": 13,
"type": "VAEDecode",
"pos": [1961, 125],
"size": [210, 46],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 15
},
{
"name": "vae",
"type": "VAE",
"link": 22
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [17],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [374, 171],
"size": [422.85, 164.31],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 19
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4, 12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"masterpiece HDR victorian portrait painting of woman, blonde hair, mountain nature, blue sky\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [377, 381],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6, 13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands, text, watermark\n"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [435, 600],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 11,
"type": "KSampler",
"pos": [1585, 114],
"size": [315, 262],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 23,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 12,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 13,
"slot_index": 2
},
{
"name": "latent_image",
"type": "LATENT",
"link": 14,
"slot_index": 3
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [15],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
469771404043268,
"randomize",
14,
8,
"dpmpp_2m",
"simple",
0.5
]
},
{
"id": 12,
"type": "SaveImage",
"pos": [2203, 123],
"size": [407.54, 468.13],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 17
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [845, 172],
"size": [315, 262],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 18
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
89848141647836,
"randomize",
12,
8,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 16,
"type": "CheckpointLoaderSimple",
"pos": [24, 315],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [18, 23],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [19, 20],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [21, 22],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v2-1_768-ema-pruned.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1495.72, 576.19],
"size": [232.94, 282.43],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 17,
"type": "MarkdownNote",
"pos": [0, 780],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[10, 3, 0, 10, 0, "LATENT"],
[12, 6, 0, 11, 1, "CONDITIONING"],
[13, 7, 0, 11, 2, "CONDITIONING"],
[14, 10, 0, 11, 3, "LATENT"],
[15, 11, 0, 13, 0, "LATENT"],
[17, 13, 0, 12, 0, "IMAGE"],
[18, 16, 0, 3, 0, "MODEL"],
[19, 16, 1, 6, 0, "CLIP"],
[20, 16, 1, 7, 0, "CLIP"],
[21, 16, 2, 8, 1, "VAE"],
[22, 16, 2, 13, 1, "VAE"],
[23, 16, 0, 11, 0, "MODEL"]
],
"groups": [
{
"id": 1,
"title": "Txt2Img",
"bounding": [0, 30, 1211, 708],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Save Intermediate Image",
"bounding": [1230, 495, 516, 196],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Hires Fix",
"bounding": [1230, 30, 710, 464],
"color": "#b58b2a",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Save Final Image",
"bounding": [1950, 30, 483, 199],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.97,
"offset": [419.13, 209.33]
}
},
"version": 0.4,
"models": [
{
"name": "v2-1_768-ema-pruned.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-1/resolve/main/v2-1_768-ema-pruned.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,553 @@
{
"last_node_id": 78,
"last_link_id": 215,
"nodes": [
{
"id": 16,
"type": "KSamplerSelect",
"pos": [484, 751],
"size": [315, 58],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"shape": 3,
"links": [19]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 17,
"type": "BasicScheduler",
"pos": [478, 860],
"size": [315, 106],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 190,
"slot_index": 0
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"shape": 3,
"links": [20]
}
],
"properties": {
"Node name for S&R": "BasicScheduler"
},
"widgets_values": ["simple", 20, 1]
},
{
"id": 26,
"type": "FluxGuidance",
"pos": [520, 100],
"size": [317.4, 58],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 175
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [129],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "FluxGuidance"
},
"widgets_values": [6],
"color": "#233",
"bgcolor": "#355"
},
{
"id": 45,
"type": "EmptyHunyuanLatentVideo",
"pos": [475.54, 432.67],
"size": [315, 130],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [180],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyHunyuanLatentVideo"
},
"widgets_values": [848, 480, 73, 1]
},
{
"id": 22,
"type": "BasicGuider",
"pos": [600, 0],
"size": [222.35, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 195,
"slot_index": 0
},
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 129,
"slot_index": 1
}
],
"outputs": [
{
"name": "GUIDER",
"type": "GUIDER",
"shape": 3,
"links": [30],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "BasicGuider"
},
"widgets_values": []
},
{
"id": 67,
"type": "ModelSamplingSD3",
"pos": [360, 0],
"size": [210, 58],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 209
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [195],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ModelSamplingSD3"
},
"widgets_values": [7]
},
{
"id": 10,
"type": "VAELoader",
"pos": [0, 420],
"size": [350, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [206, 211],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["hunyuan_video_vae_bf16.safetensors"]
},
{
"id": 11,
"type": "DualCLIPLoader",
"pos": [0, 270],
"size": [350, 106],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [205],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "DualCLIPLoader"
},
"widgets_values": [
"clip_l.safetensors",
"llava_llama3_fp8_scaled.safetensors",
"hunyuan_video",
"default"
]
},
{
"id": 73,
"type": "VAEDecodeTiled",
"pos": [1150, 200],
"size": [210, 150],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 210
},
{
"name": "vae",
"type": "VAE",
"link": 211
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [215],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecodeTiled"
},
"widgets_values": [256, 64, 64, 8]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1150, 90],
"size": [210, 46],
"flags": {},
"order": 15,
"mode": 2,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 181
},
{
"name": "vae",
"type": "VAE",
"link": 206
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 74,
"type": "Note",
"pos": [1150, 360],
"size": [210, 170],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"Use the tiled decode node by default because most people will need it.\n\nLower the tile_size and overlap if you run out of memory."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 12,
"type": "UNETLoader",
"pos": [0, 150],
"size": [350, 82],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [190, 209],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["hunyuan_video_t2v_720p_bf16.safetensors", "default"],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 77,
"type": "Note",
"pos": [0, 0],
"size": [350, 110],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"Select a fp8 weight_dtype if you are running out of memory."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 13,
"type": "SamplerCustomAdvanced",
"pos": [860, 200],
"size": [272.36, 124.54],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "noise",
"type": "NOISE",
"link": 37,
"slot_index": 0
},
{
"name": "guider",
"type": "GUIDER",
"link": 30,
"slot_index": 1
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 19,
"slot_index": 2
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 20,
"slot_index": 3
},
{
"name": "latent_image",
"type": "LATENT",
"link": 180,
"slot_index": 4
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"shape": 3,
"links": [181, 210],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustomAdvanced"
},
"widgets_values": []
},
{
"id": 44,
"type": "CLIPTextEncode",
"pos": [420, 200],
"size": [422.85, 164.31],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 205
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [175],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"anime style anime girl with massive fennec ears and one big fluffy tail, she has blonde hair long hair blue eyes wearing a pink sweater and a long blue skirt walking in a beautiful outdoor scenery with snow mountains in the background"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 75,
"type": "SaveAnimatedWEBP",
"pos": [1410, 200],
"size": [315, 366],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 215
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI", 24, false, 80, "default"]
},
{
"id": 25,
"type": "RandomNoise",
"pos": [479, 618],
"size": [315, 82],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "NOISE",
"type": "NOISE",
"shape": 3,
"links": [37]
}
],
"properties": {
"Node name for S&R": "RandomNoise"
},
"widgets_values": [1, "randomize"],
"color": "#2a363b",
"bgcolor": "#3f5159"
},
{
"id": 78,
"type": "MarkdownNote",
"pos": [0, 525],
"size": [225, 60],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/hunyuan_video/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[19, 16, 0, 13, 2, "SAMPLER"],
[20, 17, 0, 13, 3, "SIGMAS"],
[30, 22, 0, 13, 1, "GUIDER"],
[37, 25, 0, 13, 0, "NOISE"],
[129, 26, 0, 22, 1, "CONDITIONING"],
[175, 44, 0, 26, 0, "CONDITIONING"],
[180, 45, 0, 13, 4, "LATENT"],
[181, 13, 0, 8, 0, "LATENT"],
[190, 12, 0, 17, 0, "MODEL"],
[195, 67, 0, 22, 0, "MODEL"],
[205, 11, 0, 44, 0, "CLIP"],
[206, 10, 0, 8, 1, "VAE"],
[209, 12, 0, 67, 0, "MODEL"],
[210, 13, 0, 73, 0, "LATENT"],
[211, 10, 0, 73, 1, "VAE"],
[215, 73, 0, 75, 0, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"groupNodes": {},
"ds": {
"scale": 0.86,
"offset": [315.94, 195.23]
}
},
"version": 0.4,
"models": [
{
"name": "hunyuan_video_vae_bf16.safetensors",
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/vae/hunyuan_video_vae_bf16.safetensors?download=true",
"directory": "vae"
},
{
"name": "llava_llama3_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/text_encoders/llava_llama3_fp8_scaled.safetensors?download=true",
"directory": "text_encoders"
},
{
"name": "clip_l.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors?download=true",
"directory": "clip"
},
{
"name": "hunyuan_video_t2v_720p_bf16.safetensors",
"url": "https://huggingface.co/Comfy-Org/HunyuanVideo_repackaged/resolve/main/split_files/diffusion_models/hunyuan_video_t2v_720p_bf16.safetensors?download=true",
"directory": "diffusion_models"
}
]
}

View File

@@ -0,0 +1,314 @@
{
"last_node_id": 24,
"last_link_id": 41,
"nodes": [
{
"id": 3,
"type": "KSampler",
"pos": [867.8, 375.7],
"size": [315, 262],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 39
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 17
},
{
"name": "latent_image",
"type": "LATENT",
"link": 18
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
237514639057514,
"randomize",
20,
2.5,
"euler",
"karras",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1207.8, 375.7],
"size": [210, 46],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 26
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "SaveAnimatedWEBP",
"pos": [1459, 376],
"size": [741.67, 564.59],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 10
}
],
"outputs": [],
"properties": {
"Node name for S&R": "SaveAnimatedWEBP"
},
"widgets_values": ["ComfyUI", 10, false, 85, "default"]
},
{
"id": 12,
"type": "SVD_img2vid_Conditioning",
"pos": [487.8, 395.7],
"size": [315, 218],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 24
},
{
"name": "init_image",
"type": "IMAGE",
"link": 41,
"slot_index": 1
},
{
"name": "vae",
"type": "VAE",
"link": 25
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"shape": 3,
"links": [40],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"shape": 3,
"links": [17],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"shape": 3,
"links": [18],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "SVD_img2vid_Conditioning"
},
"widgets_values": [1024, 576, 14, 127, 6, 0]
},
{
"id": 14,
"type": "VideoLinearCFGGuidance",
"pos": [487.8, 265.7],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 23
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [39],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VideoLinearCFGGuidance"
},
"widgets_values": [1]
},
{
"id": 15,
"type": "ImageOnlyCheckpointLoader",
"pos": [55, 267],
"size": [369.6, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [23],
"slot_index": 0
},
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [24],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [25, 26],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "ImageOnlyCheckpointLoader"
},
"widgets_values": ["svd.safetensors"]
},
{
"id": 23,
"type": "LoadImage",
"pos": [106, 441],
"size": [315, 314.0],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [41]
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["mountains.png", "image"]
},
{
"id": 24,
"type": "MarkdownNote",
"pos": [105, 810],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/video/#image-to-video)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[10, 8, 0, 10, 0, "IMAGE"],
[17, 12, 1, 3, 2, "CONDITIONING"],
[18, 12, 2, 3, 3, "LATENT"],
[23, 15, 0, 14, 0, "MODEL"],
[24, 15, 1, 12, 0, "CLIP_VISION"],
[25, 15, 2, 12, 2, "VAE"],
[26, 15, 2, 8, 1, "VAE"],
[39, 14, 0, 3, 0, "MODEL"],
[40, 12, 0, 3, 1, "CONDITIONING"],
[41, 23, 0, 12, 1, "IMAGE"]
],
"groups": [
{
"id": 1,
"title": "Image to Video",
"bounding": [480, 195, 954, 478],
"color": "#8A8",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.96,
"offset": [255.53, 68.37]
}
},
"version": 0.4,
"models": [
{
"name": "svd.safetensors",
"url": "https://huggingface.co/stabilityai/stable-video-diffusion-img2vid/resolve/main/svd.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,360 @@
{
"last_node_id": 31,
"last_link_id": 87,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [432, 158],
"size": [422.85, 164.31],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 81
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"outdoors in the yosemite national park mountains nature\n\n\n\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [434, 371],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 82
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["watermark, text\n"]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1422, 387],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 42
},
{
"name": "vae",
"type": "VAE",
"link": 83
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [22],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [940, 180],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 80
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 72
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
152545289528694,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 29,
"type": "CheckpointLoaderSimple",
"pos": [17, 303],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [80],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [81, 82],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [83, 84],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["512-inpainting-ema.safetensors"]
},
{
"id": 20,
"type": "LoadImage",
"pos": [-107, 726],
"size": [344, 346],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [85],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": [],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["yosemite_outpaint_example.png", "image"]
},
{
"id": 30,
"type": "ImagePadForOutpaint",
"pos": [269, 727],
"size": [315, 174],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 85
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [87],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": [86],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "ImagePadForOutpaint"
},
"widgets_values": [0, 128, 0, 128, 40]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1671, 384],
"size": [360.55, 441.53],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 22
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 26,
"type": "VAEEncodeForInpaint",
"pos": [617, 720],
"size": [226.8, 98],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 87
},
{
"name": "vae",
"type": "VAE",
"link": 84
},
{
"name": "mask",
"type": "MASK",
"link": 86
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [72],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEEncodeForInpaint"
},
"widgets_values": [8]
},
{
"id": 31,
"type": "MarkdownNote",
"pos": [30, 465],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/inpaint/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[22, 8, 0, 9, 0, "IMAGE"],
[42, 3, 0, 8, 0, "LATENT"],
[72, 26, 0, 3, 3, "LATENT"],
[80, 29, 0, 3, 0, "MODEL"],
[81, 29, 1, 6, 0, "CLIP"],
[82, 29, 1, 7, 0, "CLIP"],
[83, 29, 2, 8, 1, "VAE"],
[84, 29, 2, 26, 1, "VAE"],
[85, 20, 0, 30, 0, "IMAGE"],
[86, 30, 1, 26, 2, "MASK"],
[87, 30, 0, 26, 0, "IMAGE"]
],
"groups": [
{
"id": 1,
"title": "Load image and pad for outpainting",
"bounding": [-120, 600, 1038, 509],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.86,
"offset": [491.92, 146.6]
}
},
"version": 0.4,
"models": [
{
"name": "512-inpainting-ema.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-inpainting/resolve/main/512-inpainting-ema.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,323 @@
{
"last_node_id": 30,
"last_link_id": 84,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [432, 158],
"size": [422.85, 164.31],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 81
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"closeup photograph of maine coon (cat:1.2) in the yosemite national park mountains nature"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [434, 371],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 82
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["watermark, text\n"]
},
{
"id": 26,
"type": "VAEEncodeForInpaint",
"pos": [503, 669],
"size": [226.8, 98],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 73
},
{
"name": "vae",
"type": "VAE",
"link": 83
},
{
"name": "mask",
"type": "MASK",
"link": 79
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [72],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEEncodeForInpaint"
},
"widgets_values": [6]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1422, 387],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 42
},
{
"name": "vae",
"type": "VAE",
"link": 84
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [22],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1709, 356],
"size": [210, 250],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 22
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 29,
"type": "CheckpointLoaderSimple",
"pos": [30, 314],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [80],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [81, 82],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [83, 84],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["512-inpainting-ema.safetensors"]
},
{
"id": 20,
"type": "LoadImage",
"pos": [49, 679],
"size": [385, 365],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [73],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": [79],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["yosemite_inpaint_example.png", "image"]
},
{
"id": 3,
"type": "KSampler",
"pos": [940, 180],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 80
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 72
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
1040111309094545,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 30,
"type": "MarkdownNote",
"pos": [30, 480],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/inpaint/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[22, 8, 0, 9, 0, "IMAGE"],
[42, 3, 0, 8, 0, "LATENT"],
[72, 26, 0, 3, 3, "LATENT"],
[73, 20, 0, 26, 0, "IMAGE"],
[79, 20, 1, 26, 2, "MASK"],
[80, 29, 0, 3, 0, "MODEL"],
[81, 29, 1, 6, 0, "CLIP"],
[82, 29, 1, 7, 0, "CLIP"],
[83, 29, 2, 26, 1, "VAE"],
[84, 29, 2, 8, 1, "VAE"]
],
"groups": [
{
"id": 1,
"title": "Load image and alpha mask for inpainting",
"bounding": [-15, 600, 786, 442],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.88,
"offset": [832.78, 166.61]
}
},
"version": 0.4,
"models": [
{
"name": "512-inpainting-ema.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-inpainting/resolve/main/512-inpainting-ema.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,540 @@
{
"last_node_id": 33,
"last_link_id": 62,
"nodes": [
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-60, 229],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [54],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8, 31],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["wd-illusion-fp16.safetensors"]
},
{
"id": 13,
"type": "CheckpointLoaderSimple",
"pos": [1296, -571],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [56],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [27],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["cardosAnime_v10.safetensors"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [370, 40],
"size": [510, 220],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"anime happy girl (fennec:1.2) (ears:1.3) blonde long (messy hair:1.1) blue eyes, wearing serafuku jeans (sitting on rock:1.15) (spread legs:1.15) (sneakers:0.95) in lake rural swiss village on the mountain side sky clouds HDR sunset\n(exceptional, best aesthetic, new, newest, best quality, masterpiece, extremely detailed, anime:1.05)\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [370, 300],
"size": [510, 190],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"lowres, bad anatomy, bad hands, (text:1.1), blurry, mutated hands and fingers, mutation, deformed face, ugly, (logo:1.1), cropped, worst quality, jpeg, (jpeg artifacts), deleted, old, oldest, (censored), (bad aesthetic), (mosaic censoring, bar censor, blur censor) earphones"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [560, 540],
"size": [315, 106],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1368, 768, 1]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1280, 140],
"size": [210, 46],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "SaveImage",
"pos": [1540, 140],
"size": [1174.13, 734.16],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 10
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 22,
"type": "CLIPSetLastLayer",
"pos": [1670, -550],
"size": [315, 58],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 27
}
],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [13, 14],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPSetLastLayer"
},
"widgets_values": [-2]
},
{
"id": 15,
"type": "CLIPTextEncode",
"pos": [2060, -920],
"size": [662.38, 313.1],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 14
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [57],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"from far away anime happy girl (fennec ears:0.95) long (messy hair:1.3) blue eyes, wearing serafuku jeans sitting on rock spread legs (sneakers:0.95) in lake rural swiss village on the mountain side sky clouds HDR sunset\n"
]
},
{
"id": 14,
"type": "CLIPTextEncode",
"pos": [2060, -550],
"size": [660, 300],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 13
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [58],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), (text:1.1), letters, numbers, error, cropped, (jpeg artifacts:1.2), (signature:1.1), (watermark:1.1), username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.1), extra legs, (forehead mark) (penis)"
]
},
{
"id": 11,
"type": "VAEDecode",
"pos": [3240, -750],
"size": [210, 46],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 60
},
{
"name": "vae",
"type": "VAE",
"link": 31
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 12,
"type": "SaveImage",
"pos": [3540, -750],
"size": [1868.09, 1101.47],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 12
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 32,
"type": "KSampler",
"pos": [2830, -750],
"size": [315, 262],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 56
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 57
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 58
},
{
"name": "latent_image",
"type": "LATENT",
"link": 59
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [60],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
417682270866800,
"randomize",
8,
13,
"dpmpp_sde",
"simple",
0.5
]
},
{
"id": 27,
"type": "LatentUpscaleBy",
"pos": [1510, -160],
"size": [325.41, 82],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 62,
"slot_index": 0
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [59],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LatentUpscaleBy"
},
"widgets_values": ["bislerp", 1.5]
},
{
"id": 3,
"type": "KSampler",
"pos": [920, 140],
"size": [318.5, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 54
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7, 62],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
758448896326830,
"randomize",
14,
8,
"dpmpp_sde",
"simple",
1
]
},
{
"id": 33,
"type": "MarkdownNote",
"pos": [-45, 375],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/#more-examples)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[3, 4, 1, 6, 0, "CLIP"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[5, 4, 1, 7, 0, "CLIP"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[10, 8, 0, 10, 0, "IMAGE"],
[12, 11, 0, 12, 0, "IMAGE"],
[13, 22, 0, 14, 0, "CLIP"],
[14, 22, 0, 15, 0, "CLIP"],
[27, 13, 1, 22, 0, "CLIP"],
[31, 4, 2, 11, 1, "VAE"],
[54, 4, 0, 3, 0, "MODEL"],
[56, 13, 0, 32, 0, "MODEL"],
[57, 15, 0, 32, 1, "CONDITIONING"],
[58, 14, 0, 32, 2, "CONDITIONING"],
[59, 27, 0, 32, 3, "LATENT"],
[60, 32, 0, 11, 0, "LATENT"],
[62, 3, 0, 27, 0, "LATENT"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.76,
"offset": [1200.17, 444.58]
}
},
"version": 0.4,
"models": [
{
"name": "wd-illusion-fp16.safetensors",
"url": "https://huggingface.co/waifu-diffusion/wd-1-5-beta3/resolve/main/wd-illusion-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "cardosAnime_v10.safetensors",
"url": "https://huggingface.co/jomcs/NeverEnding_Dream-Feb19-2023/resolve/07c9bc67d4ac9a85b68321d9b62f20c00171d8d5/CarDos%20Anime/cardosAnime_v10.safetensors?download=true",
"directory": "checkpoints"
}
]
}

311
public/templates/lora.json Normal file
View File

@@ -0,0 +1,311 @@
{
"last_node_id": 11,
"last_link_id": 14,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 14
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 13
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["masterpiece best quality girl"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 12
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
851616030078638,
"randomize",
20,
8,
"euler",
"normal",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [210, 250],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-461, 288],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [10],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [11],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 10,
"type": "LoraLoader",
"pos": [-25, 144],
"size": [315, 126],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 10
},
{
"name": "clip",
"type": "CLIP",
"link": 11
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [12],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [13, 14],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoraLoader"
},
"widgets_values": ["epiNoiseoffset_v2.safetensors", 1, 1]
},
{
"id": 11,
"type": "MarkdownNote",
"pos": [-450, 435],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/lora/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[9, 8, 0, 9, 0, "IMAGE"],
[10, 4, 0, 10, 0, "MODEL"],
[11, 4, 1, 10, 1, "CLIP"],
[12, 10, 0, 3, 0, "MODEL"],
[13, 10, 1, 6, 0, "CLIP"],
[14, 10, 1, 7, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.06,
"offset": [777.19, 192.48]
}
},
"version": 0.4,
"models": [
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "epiNoiseoffset_v2.safetensors",
"url": "https://civitai.com/api/download/models/16576?type=Model&format=SafeTensor&size=full&fp=fp16",
"directory": "loras"
}
]
}

View File

@@ -0,0 +1,357 @@
{
"last_node_id": 12,
"last_link_id": 18,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 14
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["bad hands"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 13
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["masterpiece best quality girl"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 12
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
513173432917412,
"randomize",
20,
8,
"euler",
"normal",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [210, 250],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 10,
"type": "LoraLoader",
"pos": [-27, 160],
"size": [315, 126],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 15
},
{
"name": "clip",
"type": "CLIP",
"link": 16
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [12],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [13, 14],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoraLoader"
},
"widgets_values": ["epiNoiseoffset_v2.safetensors", 1, 1]
},
{
"id": 11,
"type": "LoraLoader",
"pos": [-379, 160],
"size": [315, 126],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 17
},
{
"name": "clip",
"type": "CLIP",
"link": 18
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [15],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [16],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoraLoader"
},
"widgets_values": ["theovercomer8sContrastFix_sd15.safetensors", 1, 1]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-780, 284],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [17],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [18],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
},
{
"id": 12,
"type": "MarkdownNote",
"pos": [-765, 450],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/lora/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[9, 8, 0, 9, 0, "IMAGE"],
[12, 10, 0, 3, 0, "MODEL"],
[13, 10, 1, 6, 0, "CLIP"],
[14, 10, 1, 7, 0, "CLIP"],
[15, 11, 0, 10, 0, "MODEL"],
[16, 11, 1, 10, 1, "CLIP"],
[17, 4, 0, 11, 0, "MODEL"],
[18, 4, 1, 11, 1, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.76,
"offset": [1200.17, 444.58]
}
},
"version": 0.4,
"models": [
{
"name": "theovercomer8sContrastFix_sd15.safetensors",
"url": "https://civitai.com/api/download/models/10350?type=Model&format=SafeTensor&size=full&fp=fp16",
"directory": "loras"
},
{
"name": "v1-5-pruned-emaonly-fp16.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "epiNoiseoffset_v2.safetensors",
"url": "https://civitai.com/api/download/models/16576?type=Model&format=SafeTensor&size=full&fp=fp16",
"directory": "loras"
}
]
}

View File

@@ -0,0 +1,482 @@
{
"last_node_id": 79,
"last_link_id": 190,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [420, 190],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 74
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [187],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"best quality, 4k, HDR, a tracking shot of a beautiful scene of the sea waves on the beach with a massive explosion in the water"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [420, 390],
"size": [425.28, 180.61],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [188],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"low quality, worst quality, deformed, distorted, disfigured, motion smear, motion artifacts, fused fingers, bad anatomy, weird hand, ugly"
],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1600, 30],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 171
},
{
"name": "vae",
"type": "VAE",
"link": 87
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [106],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 38,
"type": "CLIPLoader",
"pos": [60, 190],
"size": [315, 82],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [74, 75],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPLoader"
},
"widgets_values": ["t5xxl_fp16.safetensors", "ltxv", "default"]
},
{
"id": 41,
"type": "SaveAnimatedWEBP",
"pos": [1830, 30],
"size": [680, 610],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 106
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI", 24, false, 90, "default"]
},
{
"id": 44,
"type": "CheckpointLoaderSimple",
"pos": [520, 30],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [181],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": null
},
{
"name": "VAE",
"type": "VAE",
"links": [87, 189],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["ltx-video-2b-v0.9.safetensors"]
},
{
"id": 69,
"type": "LTXVConditioning",
"pos": [920, 60],
"size": [223.87, 78],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 183
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 184
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [166],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [167],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LTXVConditioning"
},
"widgets_values": [25]
},
{
"id": 71,
"type": "LTXVScheduler",
"pos": [856, 531],
"size": [315, 154],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "latent",
"type": "LATENT",
"shape": 7,
"link": 185
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"links": [182],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LTXVScheduler"
},
"widgets_values": [30, 2.05, 0.95, true, 0.1]
},
{
"id": 72,
"type": "SamplerCustom",
"pos": [1201, 32],
"size": [355.2, 230],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 181
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 166
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 167
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 172
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 182
},
{
"name": "latent_image",
"type": "LATENT",
"link": 186
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"links": [171],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustom"
},
"widgets_values": [true, 501744655390087, "randomize", 3]
},
{
"id": 73,
"type": "KSamplerSelect",
"pos": [860, 420],
"size": [315, 58],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"links": [172]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 76,
"type": "Note",
"pos": [40, 350],
"size": [360, 200],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"This model needs long descriptive prompts, if the prompt is too short the quality will suffer greatly."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 77,
"type": "LTXVImgToVideo",
"pos": [863, 181],
"size": [315, 214],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 187
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 188
},
{
"name": "vae",
"type": "VAE",
"link": 189
},
{
"name": "image",
"type": "IMAGE",
"link": 190
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [183],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [184],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"links": [185, 186],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "LTXVImgToVideo"
},
"widgets_values": [768, 512, 97, 1, 0.15]
},
{
"id": 78,
"type": "LoadImage",
"pos": [420, 620],
"size": [385.16, 333.33],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [190]
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["island.jpg", "image"]
},
{
"id": 79,
"type": "MarkdownNote",
"pos": [45, 600],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/ltxv/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[74, 38, 0, 6, 0, "CLIP"],
[75, 38, 0, 7, 0, "CLIP"],
[87, 44, 2, 8, 1, "VAE"],
[106, 8, 0, 41, 0, "IMAGE"],
[166, 69, 0, 72, 1, "CONDITIONING"],
[167, 69, 1, 72, 2, "CONDITIONING"],
[171, 72, 0, 8, 0, "LATENT"],
[172, 73, 0, 72, 3, "SAMPLER"],
[181, 44, 0, 72, 0, "MODEL"],
[182, 71, 0, 72, 4, "SIGMAS"],
[183, 77, 0, 69, 0, "CONDITIONING"],
[184, 77, 1, 69, 1, "CONDITIONING"],
[185, 77, 2, 71, 0, "LATENT"],
[186, 77, 2, 72, 5, "LATENT"],
[187, 6, 0, 77, 0, "CONDITIONING"],
[188, 7, 0, 77, 1, "CONDITIONING"],
[189, 44, 2, 77, 2, "VAE"],
[190, 78, 0, 77, 3, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.23,
"offset": [-35.52, 153.62]
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "clip"
},
{
"name": "ltx-video-2b-v0.9.safetensors",
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,419 @@
{
"last_node_id": 77,
"last_link_id": 182,
"nodes": [
{
"id": 38,
"type": "CLIPLoader",
"pos": [60, 190],
"size": [315, 82],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [74, 75],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPLoader"
},
"widgets_values": ["t5xxl_fp16.safetensors", "ltxv", "default"]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1600, 30],
"size": [210, 46],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 171
},
{
"name": "vae",
"type": "VAE",
"link": 87
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [106],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 69,
"type": "LTXVConditioning",
"pos": [920, 60],
"size": [223.87, 78],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 169
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 170
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [166],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [167],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LTXVConditioning"
},
"widgets_values": [25]
},
{
"id": 72,
"type": "SamplerCustom",
"pos": [1201, 32],
"size": [355.2, 230],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 181
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 166
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 167
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 172
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 182
},
{
"name": "latent_image",
"type": "LATENT",
"link": 175
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"links": [171],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustom"
},
"widgets_values": [true, 497797676867141, "randomize", 3]
},
{
"id": 44,
"type": "CheckpointLoaderSimple",
"pos": [520, 30],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [181],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": null
},
{
"name": "VAE",
"type": "VAE",
"links": [87],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["ltx-video-2b-v0.9.safetensors"]
},
{
"id": 70,
"type": "EmptyLTXVLatentVideo",
"pos": [860, 240],
"size": [315, 130],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [168, 175],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLTXVLatentVideo"
},
"widgets_values": [768, 512, 97, 1]
},
{
"id": 71,
"type": "LTXVScheduler",
"pos": [856, 531],
"size": [315, 154],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "latent",
"type": "LATENT",
"shape": 7,
"link": 168
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"links": [182],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "LTXVScheduler"
},
"widgets_values": [30, 2.05, 0.95, true, 0.1]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [420, 190],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 74
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [169],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"A woman with long brown hair and light skin smiles at another woman with long blonde hair. The woman with brown hair wears a black jacket and has a small, barely noticeable mole on her right cheek. The camera angle is a close-up, focused on the woman with brown hair's face. The lighting is warm and natural, likely from the setting sun, casting a soft glow on the scene. The scene appears to be real-life footage."
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [420, 390],
"size": [425.28, 180.61],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [170],
"slot_index": 0
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"low quality, worst quality, deformed, distorted, disfigured, motion smear, motion artifacts, fused fingers, bad anatomy, weird hand, ugly"
],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 73,
"type": "KSamplerSelect",
"pos": [860, 420],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"links": [172]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler"]
},
{
"id": 76,
"type": "Note",
"pos": [40, 350],
"size": [360, 200],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"This model needs long descriptive prompts, if the prompt is too short the quality will suffer greatly."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 41,
"type": "SaveAnimatedWEBP",
"pos": [1830, 30],
"size": [680, 610],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 106
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI", 24, false, 90, "default"]
},
{
"id": 77,
"type": "MarkdownNote",
"pos": [45, 600],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/ltxv/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[74, 38, 0, 6, 0, "CLIP"],
[75, 38, 0, 7, 0, "CLIP"],
[87, 44, 2, 8, 1, "VAE"],
[106, 8, 0, 41, 0, "IMAGE"],
[166, 69, 0, 72, 1, "CONDITIONING"],
[167, 69, 1, 72, 2, "CONDITIONING"],
[168, 70, 0, 71, 0, "LATENT"],
[169, 6, 0, 69, 0, "CONDITIONING"],
[170, 7, 0, 69, 1, "CONDITIONING"],
[171, 72, 0, 8, 0, "LATENT"],
[172, 73, 0, 72, 3, "SAMPLER"],
[175, 70, 0, 72, 5, "LATENT"],
[181, 44, 0, 72, 0, "MODEL"],
[182, 71, 0, 72, 4, "SIGMAS"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.65,
"offset": [1490.32, 926.49]
}
},
"version": 0.4,
"models": [
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "clip"
},
{
"name": "ltx-video-2b-v0.9.safetensors",
"url": "https://huggingface.co/Lightricks/LTX-Video/resolve/main/ltx-video-2b-v0.9.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,502 @@
{
"last_node_id": 32,
"last_link_id": 43,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [180, 203],
"size": [425.28, 180.61],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 42
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(hands), text, error, cropped, (worst quality:1.2), (low quality:1.2), normal quality, (jpeg artifacts:1.3), signature, watermark, username, blurry, artist name, monochrome, sketch, censorship, censor, (copyright:1.2), extra legs, (forehead mark) (depth of field) (emotionless) (penis)"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [287, 462],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [768, 768, 1]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1053, 172],
"size": [210, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 28
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 24,
"type": "CLIPTextEncode",
"pos": [-823, -550],
"size": [422.85, 164.31],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 43
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [37],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"(solo) girl photograph realistic (flat chest:0.9), (fennec ears:1.0) (fox ears:1.0), (messy hair) blonde hair, blue eyes, standing, serafuku sweater, (brick house) (scenery HDR landscape) (sun clouds) sky, mountains,\n\n"
]
},
{
"id": 21,
"type": "LoadImage",
"pos": [-560, -144],
"size": [272.84, 372.22],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [33],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["pose_present.png", "image"]
},
{
"id": 31,
"type": "CheckpointLoaderSimple",
"pos": [-1005, 281],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [41],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [42, 43],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": null
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["AOM3A1.safetensors"]
},
{
"id": 15,
"type": "VAELoader",
"pos": [720, 506],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [28],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["kl-f8-anime2.ckpt"]
},
{
"id": 27,
"type": "ControlNetLoader",
"pos": [-641, -245],
"size": [352.55, 58],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [39],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["control_v11p_sd15_openpose_fp16.safetensors"]
},
{
"id": 26,
"type": "ControlNetLoader",
"pos": [156, -339],
"size": [343.32, 58],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"links": [38],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["control_v11p_sd15_scribble_fp16.safetensors"]
},
{
"id": 22,
"type": "ControlNetApply",
"pos": [-204, -240],
"size": [317.4, 98],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 37
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 39
},
{
"name": "image",
"type": "IMAGE",
"link": 33
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [35],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [1]
},
{
"id": 3,
"type": "KSampler",
"pos": [699, 167],
"size": [315, 262],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 41
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
894480165483805,
"randomize",
12,
6,
"dpmpp_sde",
"normal",
1
]
},
{
"id": 23,
"type": "ControlNetApply",
"pos": [550.81, -385.59],
"size": [317.4, 98],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 35
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 38
},
{
"name": "image",
"type": "IMAGE",
"link": 34
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [40],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetApply"
},
"widgets_values": [0.8]
},
{
"id": 20,
"type": "LoadImage",
"pos": [188, -217],
"size": [278.1, 361.87],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [34],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["house_scribble.png", "image"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1310, 169],
"size": [516.95, 567.67],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 32,
"type": "MarkdownNote",
"pos": [-1005, 435],
"size": [225, 60],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#mixing-controlnets)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[28, 15, 0, 8, 1, "VAE"],
[33, 21, 0, 22, 2, "IMAGE"],
[34, 20, 0, 23, 2, "IMAGE"],
[35, 22, 0, 23, 0, "CONDITIONING"],
[37, 24, 0, 22, 0, "CONDITIONING"],
[38, 26, 0, 23, 1, "CONTROL_NET"],
[39, 27, 0, 22, 1, "CONTROL_NET"],
[40, 23, 0, 3, 1, "CONDITIONING"],
[41, 31, 0, 3, 0, "MODEL"],
[42, 31, 1, 7, 0, "CLIP"],
[43, 31, 1, 24, 0, "CLIP"]
],
"groups": [
{
"id": 1,
"title": "Apply Pose ControlNet",
"bounding": [-735, -360, 859, 323],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Apply Scribble ControlNet",
"bounding": [165, -480, 739, 336],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.81,
"offset": [2040.05, 734.44]
}
},
"version": 0.4,
"models": [
{
"name": "control_v11p_sd15_scribble_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_scribble_fp16.safetensors?download=true",
"directory": "controlnet"
},
{
"name": "AOM3A1.safetensors",
"url": "https://huggingface.co/Eata/Model_V1/resolve/main/AOM3A1.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "control_v11p_sd15_openpose_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/ControlNet-v1-1_fp16_safetensors/resolve/main/control_v11p_sd15_openpose_fp16.safetensors",
"directory": "controlnet"
},
{
"name": "kl-f8-anime2.ckpt",
"url": "https://huggingface.co/hakurei/waifu-diffusion-v1-4/resolve/main/vae/kl-f8-anime2.ckpt?download=true",
"directory": "vae"
}
]
}

View File

@@ -0,0 +1,308 @@
{
"last_node_id": 40,
"last_link_id": 79,
"nodes": [
{
"id": 3,
"type": "KSampler",
"pos": [863, 187],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 79
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 46
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 52
},
{
"name": "latent_image",
"type": "LATENT",
"link": 38
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [35],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
704883238463297,
"randomize",
30,
4.5,
"euler",
"simple",
1
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 74
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [46],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"a fox moving quickly in a beautiful winter scenery nature trees sunset tracking camera"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.28, 180.61],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [52],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1210, 190],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 35
},
{
"name": "vae",
"type": "VAE",
"link": 76
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [56],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 21,
"type": "EmptyMochiLatentVideo",
"pos": [520, 620],
"size": [315, 130],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [38],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyMochiLatentVideo"
},
"widgets_values": [848, 480, 37, 1]
},
{
"id": 28,
"type": "SaveAnimatedWEBP",
"pos": [1460, 190],
"size": [847.3, 602.03],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 56
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI", 24, false, 80, "default"]
},
{
"id": 37,
"type": "UNETLoader",
"pos": [420, 40],
"size": [315, 82],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [79],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "UNETLoader"
},
"widgets_values": ["mochi_preview_bf16.safetensors", "default"]
},
{
"id": 38,
"type": "CLIPLoader",
"pos": [40, 270],
"size": [315, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"links": [74, 75],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPLoader"
},
"widgets_values": ["t5xxl_fp16.safetensors", "mochi", "default"]
},
{
"id": 39,
"type": "VAELoader",
"pos": [890, 500],
"size": [278.68, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"links": [76]
}
],
"properties": {
"Node name for S&R": "VAELoader"
},
"widgets_values": ["mochi_vae.safetensors"]
},
{
"id": 40,
"type": "MarkdownNote",
"pos": [45, 405],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/mochi/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[35, 3, 0, 8, 0, "LATENT"],
[38, 21, 0, 3, 3, "LATENT"],
[46, 6, 0, 3, 1, "CONDITIONING"],
[52, 7, 0, 3, 2, "CONDITIONING"],
[56, 8, 0, 28, 0, "IMAGE"],
[74, 38, 0, 6, 0, "CLIP"],
[75, 38, 0, 7, 0, "CLIP"],
[76, 39, 0, 8, 1, "VAE"],
[79, 37, 0, 3, 0, "MODEL"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [35.42, 115.48]
}
},
"version": 0.4,
"models": [
{
"name": "mochi_vae.safetensors",
"url": "https://huggingface.co/Comfy-Org/mochi_preview_repackaged/resolve/main/split_files/vae/mochi_vae.safetensors?download=true",
"directory": "vae"
},
{
"name": "t5xxl_fp16.safetensors",
"url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors?download=true",
"directory": "clip"
},
{
"name": "mochi_preview_bf16.safetensors",
"url": "https://huggingface.co/Comfy-Org/mochi_preview_repackaged/resolve/main/split_files/diffusion_models/mochi_preview_bf16.safetensors?download=true",
"directory": "diffusion_models"
}
]
}

View File

@@ -0,0 +1,470 @@
{
"last_node_id": 52,
"last_link_id": 105,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1152, 48],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 63
},
{
"name": "vae",
"type": "VAE",
"link": 8
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 33,
"type": "EmptySD3LatentImage",
"pos": [576, 336],
"size": [210, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [66],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 48,
"type": "ImageScale",
"pos": [-320, 448],
"size": [315, 130],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 91
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [92],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ImageScale"
},
"widgets_values": ["bilinear", 1024, 1024, "center"]
},
{
"id": 49,
"type": "PreviewImage",
"pos": [384, 512],
"size": [443.1, 520.83],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 93
}
],
"outputs": [],
"properties": {
"Node name for S&R": "PreviewImage"
},
"widgets_values": []
},
{
"id": 50,
"type": "ConditioningZeroOut",
"pos": [203, 133],
"size": [317.4, 26],
"flags": {
"collapsed": true
},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 98
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [102],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningZeroOut"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [816, 48],
"size": [284.12, 262],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 14
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 103,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 104
},
{
"name": "latent_image",
"type": "LATENT",
"link": 66
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [63],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
790192293768778,
"randomize",
32,
4.5,
"euler",
"simple",
1
]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1392, 48],
"size": [882.45, 927.85],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 13
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 45,
"type": "LoadImage",
"pos": [-666, 447],
"size": [288, 336],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [91]
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sd3_controlnet_example.png", "image"]
},
{
"id": 47,
"type": "Canny",
"pos": [20, 449],
"size": [315, 82],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 92
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [93, 99],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "Canny"
},
"widgets_values": [0.4, 0.8]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [0, -128],
"size": [320, 192],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 65
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [98, 101],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"happy cute anime fox girl with massive fluffy fennec ears and blonde fluffy hair long hair blue eyes wearing a red scarf a pink sweater and blue jeans\n\nstanding in a beautiful forest with mountains\n\n"
]
},
{
"id": 51,
"type": "ControlNetApplyAdvanced",
"pos": [470, 60],
"size": [315, 186],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 101
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 102
},
{
"name": "control_net",
"type": "CONTROL_NET",
"link": 100
},
{
"name": "image",
"type": "IMAGE",
"link": 99
},
{
"name": "vae",
"type": "VAE",
"shape": 7,
"link": 105
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [103],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [104],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "ControlNetApplyAdvanced"
},
"widgets_values": [0.66, 0, 1]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-576, 64],
"size": [499.99, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [14],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [65],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8, 105],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd3.5_large_fp8_scaled.safetensors"]
},
{
"id": 46,
"type": "ControlNetLoader",
"pos": [-128, 320],
"size": [460.34, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CONTROL_NET",
"type": "CONTROL_NET",
"shape": 3,
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ControlNetLoader"
},
"widgets_values": ["sd3.5_large_controlnet_canny.safetensors"]
},
{
"id": 52,
"type": "MarkdownNote",
"pos": [-570, 210],
"size": [225, 60],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sd3/#sd35-controlnets)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[8, 4, 2, 8, 1, "VAE"],
[13, 8, 0, 9, 0, "IMAGE"],
[14, 4, 0, 3, 0, "MODEL"],
[63, 3, 0, 8, 0, "LATENT"],
[65, 4, 1, 6, 0, "CLIP"],
[66, 33, 0, 3, 3, "LATENT"],
[91, 45, 0, 48, 0, "IMAGE"],
[92, 48, 0, 47, 0, "IMAGE"],
[93, 47, 0, 49, 0, "IMAGE"],
[98, 6, 0, 50, 0, "CONDITIONING"],
[99, 47, 0, 51, 3, "IMAGE"],
[100, 46, 0, 51, 2, "CONTROL_NET"],
[101, 6, 0, 51, 0, "CONDITIONING"],
[102, 50, 0, 51, 1, "CONDITIONING"],
[103, 51, 0, 3, 1, "CONDITIONING"],
[104, 51, 1, 3, 2, "CONDITIONING"],
[105, 4, 2, 51, 4, "VAE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.91,
"offset": [686.52, 188.52]
}
},
"version": 0.4,
"models": [
{
"name": "sd3.5_large_controlnet_canny.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-3.5-controlnets/resolve/main/sd3.5_large_controlnet_canny.safetensors?download=true",
"directory": "controlnet"
},
{
"name": "sd3.5_large_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-3.5-fp8/resolve/main/sd3.5_large_fp8_scaled.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,278 @@
{
"last_node_id": 54,
"last_link_id": 102,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1200, 96],
"size": [210, 46],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 53,
"slot_index": 1
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [51],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1440, 96],
"size": [952.51, 1007.93],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 51,
"slot_index": 0
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 40,
"type": "CLIPTextEncode",
"pos": [384, 336],
"size": [432, 192],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 102
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [80],
"slot_index": 0
}
],
"title": "Negative Prompt",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 53,
"type": "EmptySD3LatentImage",
"pos": [480, 576],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptySD3LatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-48, 96],
"size": [384.76, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [99],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [101, 102],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [53],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd3.5_large_fp8_scaled.safetensors"]
},
{
"id": 16,
"type": "CLIPTextEncode",
"pos": [384, 96],
"size": [432, 192],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 101
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [21],
"slot_index": 0
}
],
"title": "Positive Prompt",
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"a bottle with a pink and red galaxy inside it on top of a wooden table on a table in the middle of a modern kitchen with a window to the outdoors mountain range bright sun clouds forest"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 3,
"type": "KSampler",
"pos": [864, 96],
"size": [315, 262],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 99,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 21
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 80
},
{
"name": "latent_image",
"type": "LATENT",
"link": 100
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
585483408983215,
"randomize",
20,
4.01,
"euler",
"sgm_uniform",
1
]
},
{
"id": 54,
"type": "MarkdownNote",
"pos": [-45, 240],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sd3/#sd35)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[21, 16, 0, 3, 1, "CONDITIONING"],
[51, 8, 0, 9, 0, "IMAGE"],
[53, 4, 2, 8, 1, "VAE"],
[80, 40, 0, 3, 2, "CONDITIONING"],
[99, 4, 0, 3, 0, "MODEL"],
[100, 53, 0, 3, 3, "LATENT"],
[101, 4, 1, 16, 0, "CLIP"],
[102, 4, 1, 40, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.14,
"offset": [93.35, -1.71]
}
},
"version": 0.4,
"models": [
{
"name": "sd3.5_large_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-3.5-fp8/resolve/main/sd3.5_large_fp8_scaled.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,728 @@
{
"last_node_id": 49,
"last_link_id": 44,
"nodes": [
{
"id": 36,
"type": "Note",
"pos": [-74, -470],
"size": [315.7, 147.96],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Load Checkpoint BASE",
"properties": {
"text": ""
},
"widgets_values": [
"This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 37,
"type": "Note",
"pos": [610, -460],
"size": [330, 140],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Load Checkpoint REFINER",
"properties": {
"text": ""
},
"widgets_values": [
"This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRas and all their deviations."
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 40,
"type": "Note",
"pos": [1325, 234],
"size": [451.5, 424.42],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - KSampler ADVANCED General Information",
"properties": {
"text": ""
},
"widgets_values": [
        "Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all its leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each workflow job completed.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out its process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop its process of de-noising the picture.\nIf there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the leftover noise to the next KSampler (assuming there is another one)."
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [544.5, 651.12],
"size": [300, 110],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [27],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 1024, 1],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 17,
"type": "VAEDecode",
"pos": [2220.77, 129.6],
"size": [200, 50],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 25
},
{
"name": "vae",
"type": "VAE",
"link": 34
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [28],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": [],
"color": "#332922",
"bgcolor": "#593930"
},
{
"id": 41,
"type": "Note",
"pos": [2160.77, 229.6],
"size": [320, 120],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - VAE Decoder",
"properties": {
"text": ""
},
"widgets_values": [
"This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG."
],
"color": "#332922",
"bgcolor": "#593930"
},
{
"id": 42,
"type": "Note",
"pos": [564.5, 801.12],
"size": [260, 210],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Empty Latent Image",
"properties": {
"text": ""
},
"widgets_values": [
"This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 11,
"type": "KSamplerAdvanced",
"pos": [1800, 130],
"size": [300, 340],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 14,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 23
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 24
},
{
"name": "latent_image",
"type": "LATENT",
"link": 13
},
{
"name": "steps",
"type": "INT",
"widget": {
"name": "steps"
},
"link": 38,
"slot_index": 4
},
{
"name": "start_at_step",
"type": "INT",
"widget": {
"name": "start_at_step"
},
"link": 44
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [25],
"slot_index": 0
}
],
"title": "KSampler (Advanced) - REFINER",
"properties": {
"Node name for S&R": "KSamplerAdvanced"
},
"widgets_values": [
"disable",
0,
"fixed",
25,
8,
"euler",
"normal",
20,
10000,
"disable"
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 12,
"type": "CheckpointLoaderSimple",
"pos": [600, -611],
"size": [350, 100],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [14],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [19, 20],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [34],
"slot_index": 2
}
],
"title": "Load Checkpoint - REFINER",
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_refiner_1.0.safetensors"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-90, -620],
"size": [350, 100],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [10],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [],
"slot_index": 2
}
],
"title": "Load Checkpoint - BASE",
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 47,
"type": "PrimitiveNode",
"pos": [1037.53, 881.61],
"size": [210, 82],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "end_at_step"
},
"links": [43, 44],
"slot_index": 0
}
],
"title": "end_at_step",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [20, "fixed"],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 45,
"type": "PrimitiveNode",
"pos": [1039.53, 734.61],
"size": [210, 82],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "steps"
},
"links": [38, 41]
}
],
"title": "steps",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [25, "fixed"],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 48,
"type": "Note",
"pos": [1036, 1018],
"size": [213.91, 110.17],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"These can be used to control the total sampling steps and the step at which the sampling switches to the refiner."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 10,
"type": "KSamplerAdvanced",
"pos": [1000, 230],
"size": [300, 334],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 10
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 11
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 12
},
{
"name": "latent_image",
"type": "LATENT",
"link": 27
},
{
"name": "steps",
"type": "INT",
"widget": {
"name": "steps"
},
"link": 41,
"slot_index": 4
},
{
"name": "end_at_step",
"type": "INT",
"widget": {
"name": "end_at_step"
},
"link": 43,
"slot_index": 5
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [13],
"slot_index": 0
}
],
"title": "KSampler (Advanced) - BASE",
"properties": {
"Node name for S&R": "KSamplerAdvanced"
},
"widgets_values": [
"enable",
6767725640732,
"randomize",
25,
8,
"euler",
"normal",
0,
20,
"enable"
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 16,
"type": "CLIPTextEncode",
"pos": [1110, -90],
"size": [340, 140],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 15,
"type": "CLIPTextEncode",
"pos": [1110, -270],
"size": [340, 140],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 19
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"daytime scenery sky nature dark blue bottle with a galaxy stars milky way in it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [610, 30],
"size": [320, 160],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [11],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["daytime sky nature dark blue galaxy bottle"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [610, 240],
"size": [320, 150],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 19,
"type": "SaveImage",
"pos": [2600, 130],
"size": [735.55, 823.98],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 28
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"],
"color": "#222",
"bgcolor": "#000"
},
{
"id": 49,
"type": "MarkdownNote",
"pos": [-90, -255],
"size": [225, 60],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdxl/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[3, 4, 1, 6, 0, "CLIP"],
[5, 4, 1, 7, 0, "CLIP"],
[10, 4, 0, 10, 0, "MODEL"],
[11, 6, 0, 10, 1, "CONDITIONING"],
[12, 7, 0, 10, 2, "CONDITIONING"],
[13, 10, 0, 11, 3, "LATENT"],
[14, 12, 0, 11, 0, "MODEL"],
[19, 12, 1, 15, 0, "CLIP"],
[20, 12, 1, 16, 0, "CLIP"],
[23, 15, 0, 11, 1, "CONDITIONING"],
[24, 16, 0, 11, 2, "CONDITIONING"],
[25, 11, 0, 17, 0, "LATENT"],
[27, 5, 0, 10, 3, "LATENT"],
[28, 17, 0, 19, 0, "IMAGE"],
[34, 12, 2, 17, 1, "VAE"],
[38, 45, 0, 11, 4, "INT"],
[41, 45, 0, 10, 4, "INT"],
[43, 47, 0, 10, 5, "INT"],
[44, 47, 0, 11, 5, "INT"]
],
"groups": [
{
"id": 1,
"title": "Base Prompt",
"bounding": [585, -60, 366, 463],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Refiner Prompt",
"bounding": [1095, -360, 376, 429],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Load in BASE SDXL Model",
"bounding": [-105, -705, 369, 399],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Load in REFINER SDXL Model",
"bounding": [585, -705, 391, 400],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 5,
"title": "Empty Latent Image",
"bounding": [525, 570, 339, 443],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 6,
"title": "VAE Decoder",
"bounding": [2145, 45, 360, 350],
"color": "#b06634",
"font_size": 24,
"flags": {}
},
{
"id": 7,
"title": "Step Control",
"bounding": [1005, 630, 284, 524],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.63,
"offset": [1264.03, 812.09]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "sd_xl_refiner_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-refiner-1.0/resolve/main/sd_xl_refiner_1.0.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,495 @@
{
"last_node_id": 41,
"last_link_id": 106,
"nodes": [
{
"id": 13,
"type": "CLIPVisionEncode",
"pos": [135, -63],
"size": [253.6, 78],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 101
},
{
"name": "image",
"type": "IMAGE",
"link": 95
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 36,
"type": "CLIPVisionEncode",
"pos": [137, 24],
"size": [253.6, 78],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 102
},
{
"name": "image",
"type": "IMAGE",
"link": 98
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1277, -210],
"size": [210, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 106
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [-130, -295],
"size": [422.85, 164.31],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 104
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["anime"]
},
{
"id": 34,
"type": "LoadImage",
"pos": [-352, -29],
"size": [435.35, 377.59],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [95],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["mountains.png", "image"]
},
{
"id": 38,
"type": "LoadImage",
"pos": [-341, 412],
"size": [435.35, 377.59],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [98],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sunset.png", "image"]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [425, -18],
"size": [425.28, 180.61],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 105
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"]
},
{
"id": 40,
"type": "CheckpointLoaderSimple",
"pos": [-761, -275],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [103],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [104, 105],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [106],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"]
},
{
"id": 39,
"type": "CLIPVisionLoader",
"pos": [-760, -120],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [101, 102],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionLoader"
},
"widgets_values": ["clip_vision_g.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1542, -209],
"size": [635.19, 692.82],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [915, -218],
"size": [315, 262],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 103
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 97
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
133632471276133,
"randomize",
26,
8,
"dpmpp_3m_sde_gpu",
"exponential",
1
]
},
{
"id": 19,
"type": "unCLIPConditioning",
"pos": [347, -207],
"size": [262, 102],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 23
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 24
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [96],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "unCLIPConditioning"
},
"widgets_values": [0.75, 0]
},
{
"id": 37,
"type": "unCLIPConditioning",
"pos": [626, -205],
"size": [262, 102],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 96
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 100
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [97],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "unCLIPConditioning"
},
"widgets_values": [0.75, 0]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [534, 214],
"size": [315, 106],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 41,
"type": "MarkdownNote",
"pos": [-750, -15],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdxl/#revision)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[23, 6, 0, 19, 0, "CONDITIONING"],
[24, 13, 0, 19, 1, "CLIP_VISION_OUTPUT"],
[95, 34, 0, 13, 1, "IMAGE"],
[96, 19, 0, 37, 0, "CONDITIONING"],
[97, 37, 0, 3, 1, "CONDITIONING"],
[98, 38, 0, 36, 1, "IMAGE"],
[100, 36, 0, 37, 1, "CLIP_VISION_OUTPUT"],
[101, 39, 0, 13, 0, "CLIP_VISION"],
[102, 39, 0, 36, 0, "CLIP_VISION"],
[103, 40, 0, 3, 0, "MODEL"],
[104, 40, 1, 6, 0, "CLIP"],
[105, 40, 1, 7, 0, "CLIP"],
[106, 40, 2, 8, 1, "VAE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.1,
"offset": [962.72, 417.65]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "sd_xl_refiner_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-refiner-1.0/resolve/main/sd_xl_refiner_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "clip_vision_g.safetensors",
"url": "https://huggingface.co/comfyanonymous/clip_vision_g/resolve/main/clip_vision_g.safetensors?download=true",
"directory": "clip_vision"
}
]
}

View File

@@ -0,0 +1,499 @@
{
"last_node_id": 43,
"last_link_id": 111,
"nodes": [
{
"id": 13,
"type": "CLIPVisionEncode",
"pos": [135, -63],
"size": [253.6, 78],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 101
},
{
"name": "image",
"type": "IMAGE",
"link": 95
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 36,
"type": "CLIPVisionEncode",
"pos": [137, 24],
"size": [253.6, 78],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 102
},
{
"name": "image",
"type": "IMAGE",
"link": 98
}
],
"outputs": [
{
"name": "CLIP_VISION_OUTPUT",
"type": "CLIP_VISION_OUTPUT",
"links": [100],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionEncode"
},
"widgets_values": ["center"]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1277, -210],
"size": [210, 46],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 106
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 34,
"type": "LoadImage",
"pos": [-352, -29],
"size": [435.35, 377.59],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [95],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["mountains.png", "image"]
},
{
"id": 38,
"type": "LoadImage",
"pos": [-341, 412],
"size": [435.35, 377.59],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [98],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["sunset.png", "image"]
},
{
"id": 40,
"type": "CheckpointLoaderSimple",
"pos": [-761, -275],
"size": [315, 98],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [103],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [104],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [106],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"]
},
{
"id": 39,
"type": "CLIPVisionLoader",
"pos": [-760, -120],
"size": [315, 58],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [101, 102],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPVisionLoader"
},
"widgets_values": ["clip_vision_g.safetensors"]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1542, -209],
"size": [635.19, 692.82],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 9
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 3,
"type": "KSampler",
"pos": [915, -218],
"size": [315, 262],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 103
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 97
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 111
},
{
"name": "latent_image",
"type": "LATENT",
"link": 2
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
133632471276133,
"randomize",
26,
8,
"dpmpp_3m_sde_gpu",
"exponential",
1
]
},
{
"id": 37,
"type": "unCLIPConditioning",
"pos": [626, -205],
"size": [262, 102],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 96
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 100
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [97],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "unCLIPConditioning"
},
"widgets_values": [0.75, 0]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [534, 214],
"size": [315, 106],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 1024, 1]
},
{
"id": 19,
"type": "unCLIPConditioning",
"pos": [347, -207],
"size": [262, 102],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 110
},
{
"name": "clip_vision_output",
"type": "CLIP_VISION_OUTPUT",
"link": 24
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [96],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "unCLIPConditioning"
},
"widgets_values": [0.75, 0]
},
{
"id": 42,
"type": "ConditioningZeroOut",
"pos": [60, -211],
"size": [211.6, 26],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "conditioning",
"type": "CONDITIONING",
"link": 109,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [110],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "ConditioningZeroOut"
},
"widgets_values": []
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [-182, -184],
"size": [422.85, 164.31],
"flags": {
"collapsed": true
},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 104
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [109, 111],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""]
},
{
"id": 43,
"type": "MarkdownNote",
"pos": [-750, -15],
"size": [225, 60],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdxl/#revision)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[2, 5, 0, 3, 3, "LATENT"],
[7, 3, 0, 8, 0, "LATENT"],
[9, 8, 0, 9, 0, "IMAGE"],
[24, 13, 0, 19, 1, "CLIP_VISION_OUTPUT"],
[95, 34, 0, 13, 1, "IMAGE"],
[96, 19, 0, 37, 0, "CONDITIONING"],
[97, 37, 0, 3, 1, "CONDITIONING"],
[98, 38, 0, 36, 1, "IMAGE"],
[100, 36, 0, 37, 1, "CLIP_VISION_OUTPUT"],
[101, 39, 0, 13, 0, "CLIP_VISION"],
[102, 39, 0, 36, 0, "CLIP_VISION"],
[103, 40, 0, 3, 0, "MODEL"],
[104, 40, 1, 6, 0, "CLIP"],
[106, 40, 2, 8, 1, "VAE"],
[109, 6, 0, 42, 0, "CONDITIONING"],
[110, 42, 0, 19, 0, "CONDITIONING"],
[111, 6, 0, 3, 2, "CONDITIONING"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1.49,
"offset": [1046.06, 311.39]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "sd_xl_refiner_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-refiner-1.0/resolve/main/sd_xl_refiner_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "clip_vision_g.safetensors",
"url": "https://huggingface.co/comfyanonymous/clip_vision_g/resolve/main/clip_vision_g.safetensors?download=true",
"directory": "clip_vision"
}
]
}

View File

@@ -0,0 +1,896 @@
{
"last_node_id": 49,
"last_link_id": 44,
"nodes": [
{
"id": 15,
"type": "CLIPTextEncode",
"pos": [1139.11, -121.79],
"size": [210, 54],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 19
},
{
"name": "text",
"type": "STRING",
"widget": {
"name": "text"
},
"link": 21,
"slot_index": 1
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"evening sunset scenery blue sky nature, glass bottle with a galaxy in it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 16,
"type": "CLIPTextEncode",
"pos": [1139.11, -31.79],
"size": [210, 54],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 20
},
{
"name": "text",
"type": "STRING",
"widget": {
"name": "text"
},
"link": 22,
"slot_index": 1
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [24],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 14,
"type": "PrimitiveNode",
"pos": [117.74, 335.18],
"size": [300, 160],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "STRING",
"type": "STRING",
"widget": {
"name": "text"
},
"links": [18, 22],
"slot_index": 0
}
],
"title": "Negative Prompt (Text)",
"properties": {
"Run widget replace on values": false
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 13,
"type": "PrimitiveNode",
"pos": [117.74, 135.18],
"size": [300, 160],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "STRING",
"type": "STRING",
"widget": {
"name": "text"
},
"links": [16, 21],
"slot_index": 0
}
],
"title": "Positive Prompt (Text)",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [
"evening sunset scenery blue sky nature, glass bottle with a galaxy in it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 36,
"type": "Note",
"pos": [-74, -470],
"size": [315.7, 147.96],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Load Checkpoint BASE",
"properties": {
"text": ""
},
"widgets_values": [
"This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Base SDXL model\n - This node is also used for SD1.5 and SD2.x models\n \nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRAs and all their deviations."
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 37,
"type": "Note",
"pos": [610, -460],
"size": [330, 140],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Load Checkpoint REFINER",
"properties": {
"text": ""
},
"widgets_values": [
"This is a checkpoint model loader. \n - This is set up automatically with the optimal settings for whatever SD model version you choose to use.\n - In this example, it is for the Refiner SDXL model\n\nNOTE: When loading in another person's workflow, be sure to manually choose your own *local* model. This also applies to LoRAs and all their deviations."
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 38,
"type": "Note",
"pos": [126.74, 534.18],
"size": [284.33, 123.89],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Text Prompts",
"properties": {
"text": ""
},
"widgets_values": [
"These nodes are where you include the text for:\n - what you want in the picture (Positive Prompt, Green)\n - or what you don't want in the picture (Negative Prompt, Red)\n\nThis node type is called a \"PrimitiveNode\" if you are searching for the node type."
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 40,
"type": "Note",
"pos": [1325, 234],
"size": [451.5, 424.42],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - KSampler ADVANCED General Information",
"properties": {
"text": ""
},
"widgets_values": [
"Here are the settings that SHOULD stay in place if you want this workflow to work correctly:\n - add_noise: enable = This adds random noise into the picture so the model can denoise it\n\n - return_with_leftover_noise: enable = This sends the latent image data and all its leftover noise to the next KSampler node.\n\nThe settings to pay attention to:\n - control_after_generate = generates a new random seed after each completed workflow job.\n - steps = This is the amount of iterations you would like to run the positive and negative CLIP prompts through. Each Step will add (positive) or remove (negative) pixels based on what stable diffusion \"thinks\" should be there according to the model's training\n - cfg = This is how much you want SDXL to adhere to the prompt. Lower CFG gives you more creative but often blurrier results. Higher CFG (recommended max 10) gives you stricter results according to the CLIP prompt. If the CFG value is too high, it can also result in \"burn-in\" where the edges of the picture become even stronger, often highlighting details in unnatural ways.\n - sampler_name = This is the sampler type, and unfortunately different samplers and schedulers have better results with fewer steps, while others have better success with higher steps. This will require experimentation on your part!\n - scheduler = The algorithm/method used to choose the timesteps to denoise the picture.\n - start_at_step = This is the step number the KSampler will start out its process of de-noising the picture or \"removing the random noise to reveal the picture within\". The first KSampler usually starts with Step 0. Starting at step 0 is the same as setting denoise to 1.0 in the regular Sampler node.\n - end_at_step = This is the step number the KSampler will stop its process of de-noising the picture. If there is any remaining leftover noise and return_with_leftover_noise is enabled, then it will pass on the left over noise to the next KSampler (assuming there is another one)."
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [544.5, 651.12],
"size": [300, 110],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [27],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 1024, 1],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 17,
"type": "VAEDecode",
"pos": [2220.77, 129.6],
"size": [200, 50],
"flags": {},
"order": 23,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 25
},
{
"name": "vae",
"type": "VAE",
"link": 34
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [28],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": [],
"color": "#332922",
"bgcolor": "#593930"
},
{
"id": 41,
"type": "Note",
"pos": [2160.77, 229.6],
"size": [320, 120],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - VAE Decoder",
"properties": {
"text": ""
},
"widgets_values": [
"This node will take the latent data from the KSampler and, using the VAE, it will decode it into visible data\n\nVAE = Latent --> Visible\n\nThis can then be sent to the Save Image node to be saved as a PNG."
],
"color": "#332922",
"bgcolor": "#593930"
},
{
"id": 42,
"type": "Note",
"pos": [564.5, 801.12],
"size": [260, 210],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - Empty Latent Image",
"properties": {
"text": ""
},
"widgets_values": [
"This node sets the image's resolution in Width and Height.\n\nNOTE: For SDXL, it is recommended to use trained values listed below:\n - 1024 x 1024\n - 1152 x 896\n - 896 x 1152\n - 1216 x 832\n - 832 x 1216\n - 1344 x 768\n - 768 x 1344\n - 1536 x 640\n - 640 x 1536"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 43,
"type": "Note",
"pos": [1125, 70],
"size": [240, 80],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - CLIP Encode (REFINER)",
"properties": {
"text": ""
},
"widgets_values": [
"These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Refiner)"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [599.5, 269.48],
"size": [210, 54],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 3
},
{
"name": "text",
"type": "STRING",
"widget": {
"name": "text"
},
"link": 16,
"slot_index": 1
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [11],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"evening sunset scenery blue sky nature, glass bottle with a galaxy in it"
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [599.5, 359.48],
"size": [210, 54],
"flags": {},
"order": 20,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 5
},
{
"name": "text",
"type": "STRING",
"widget": {
"name": "text"
},
"link": 18,
"slot_index": 1
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [12],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 39,
"type": "Note",
"pos": [599.5, 449.48],
"size": [210, 80],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Note - CLIP Encode (BASE)",
"properties": {
"text": ""
},
"widgets_values": [
"These nodes receive the text from the prompt and use the optimal CLIP settings for the specified checkpoint model (in this case: SDXL Base)"
],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 11,
"type": "KSamplerAdvanced",
"pos": [1800, 130],
"size": [300, 340],
"flags": {},
"order": 22,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 14,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 23
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 24
},
{
"name": "latent_image",
"type": "LATENT",
"link": 13
},
{
"name": "steps",
"type": "INT",
"widget": {
"name": "steps"
},
"link": 38,
"slot_index": 4
},
{
"name": "start_at_step",
"type": "INT",
"widget": {
"name": "start_at_step"
},
"link": 44
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [25],
"slot_index": 0
}
],
"title": "KSampler (Advanced) - REFINER",
"properties": {
"Node name for S&R": "KSamplerAdvanced"
},
"widgets_values": [
"disable",
0,
"fixed",
25,
8,
"euler",
"normal",
20,
10000,
"disable"
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 12,
"type": "CheckpointLoaderSimple",
"pos": [600, -611],
"size": [350, 100],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [14],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [19, 20],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [34],
"slot_index": 2
}
],
"title": "Load Checkpoint - REFINER",
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_refiner_1.0.safetensors"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [-90, -620],
"size": [350, 100],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [10],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [],
"slot_index": 2
}
],
"title": "Load Checkpoint - BASE",
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"],
"color": "#323",
"bgcolor": "#535"
},
{
"id": 19,
"type": "SaveImage",
"pos": [2600, 130],
"size": [565.77, 596.38],
"flags": {},
"order": 24,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 28
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"],
"color": "#222",
"bgcolor": "#000"
},
{
"id": 47,
"type": "PrimitiveNode",
"pos": [1037.53, 881.61],
"size": [210, 82],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "end_at_step"
},
"links": [43, 44],
"slot_index": 0
}
],
"title": "end_at_step",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [20, "fixed"],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 45,
"type": "PrimitiveNode",
"pos": [1039.53, 734.61],
"size": [210, 82],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "INT",
"type": "INT",
"widget": {
"name": "steps"
},
"links": [38, 41]
}
],
"title": "steps",
"properties": {
"Run widget replace on values": false
},
"widgets_values": [25, "fixed"],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 48,
"type": "Note",
"pos": [1036, 1018],
"size": [213.91, 110.17],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {
"text": ""
},
"widgets_values": [
"These can be used to control the total sampling steps and the step at which the sampling switches to the refiner."
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 10,
"type": "KSamplerAdvanced",
"pos": [1000, 230],
"size": [300, 334],
"flags": {},
"order": 21,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 10
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 11
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 12
},
{
"name": "latent_image",
"type": "LATENT",
"link": 27
},
{
"name": "steps",
"type": "INT",
"widget": {
"name": "steps"
},
"link": 41,
"slot_index": 4
},
{
"name": "end_at_step",
"type": "INT",
"widget": {
"name": "end_at_step"
},
"link": 43,
"slot_index": 5
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [13],
"slot_index": 0
}
],
"title": "KSampler (Advanced) - BASE",
"properties": {
"Node name for S&R": "KSamplerAdvanced"
},
"widgets_values": [
"enable",
721897303308196,
"randomize",
25,
8,
"euler",
"normal",
0,
20,
"enable"
],
"color": "#223",
"bgcolor": "#335"
},
{
"id": 49,
"type": "MarkdownNote",
"pos": [-105, -255],
"size": [225, 60],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdxl/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[3, 4, 1, 6, 0, "CLIP"],
[5, 4, 1, 7, 0, "CLIP"],
[10, 4, 0, 10, 0, "MODEL"],
[11, 6, 0, 10, 1, "CONDITIONING"],
[12, 7, 0, 10, 2, "CONDITIONING"],
[13, 10, 0, 11, 3, "LATENT"],
[14, 12, 0, 11, 0, "MODEL"],
[16, 13, 0, 6, 1, "STRING"],
[18, 14, 0, 7, 1, "STRING"],
[19, 12, 1, 15, 0, "CLIP"],
[20, 12, 1, 16, 0, "CLIP"],
[21, 13, 0, 15, 1, "STRING"],
[22, 14, 0, 16, 1, "STRING"],
[23, 15, 0, 11, 1, "CONDITIONING"],
[24, 16, 0, 11, 2, "CONDITIONING"],
[25, 11, 0, 17, 0, "LATENT"],
[27, 5, 0, 10, 3, "LATENT"],
[28, 17, 0, 19, 0, "IMAGE"],
[34, 12, 2, 17, 1, "VAE"],
[38, 45, 0, 11, 4, "INT"],
[41, 45, 0, 10, 4, "INT"],
[43, 47, 0, 10, 5, "INT"],
[44, 47, 0, 11, 5, "INT"]
],
"groups": [
{
"id": 1,
"title": "Base Prompt",
"bounding": [585, 195, 252, 361],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Refiner Prompt",
"bounding": [1095, -195, 282, 372],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Text Prompts",
"bounding": [105, 45, 339, 622],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 4,
"title": "Load in BASE SDXL Model",
"bounding": [-105, -705, 369, 399],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 5,
"title": "Load in REFINER SDXL Model",
"bounding": [585, -705, 391, 400],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 6,
"title": "Empty Latent Image",
"bounding": [525, 570, 339, 443],
"color": "#a1309b",
"font_size": 24,
"flags": {}
},
{
"id": 7,
"title": "VAE Decoder",
"bounding": [2145, 45, 360, 350],
"color": "#b06634",
"font_size": 24,
"flags": {}
},
{
"id": 8,
"title": "Step Control",
"bounding": [1005, 630, 284, 524],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.78,
"offset": [685.2, 1020.68]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "sd_xl_refiner_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-refiner-1.0/resolve/main/sd_xl_refiner_1.0.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,372 @@
{
"last_node_id": 28,
"last_link_id": 54,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [352, 176],
"size": [425.28, 180.61],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 39
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [20],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"]
},
{
"id": 20,
"type": "CheckpointLoaderSimple",
"pos": [-17, -70],
"size": [343.7, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [41, 45],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [38, 39],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [40],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_turbo_1.0_fp16.safetensors"]
},
{
"id": 14,
"type": "KSamplerSelect",
"pos": [452, -144],
"size": [315, 58],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "SAMPLER",
"type": "SAMPLER",
"shape": 3,
"links": [18]
}
],
"properties": {
"Node name for S&R": "KSamplerSelect"
},
"widgets_values": ["euler_ancestral"]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [462, 398],
"size": [315, 106],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [23],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1183, -66],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 28
},
{
"name": "vae",
"type": "VAE",
"link": 40,
"slot_index": 1
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [53, 54],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 25,
"type": "PreviewImage",
"pos": [1213, 93],
"size": [501.7, 541.92],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 53
}
],
"outputs": [],
"properties": {
"Node name for S&R": "PreviewImage"
},
"widgets_values": []
},
{
"id": 22,
"type": "SDTurboScheduler",
"pos": [452, -248],
"size": [315, 82],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 45,
"slot_index": 0
}
],
"outputs": [
{
"name": "SIGMAS",
"type": "SIGMAS",
"shape": 3,
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "SDTurboScheduler"
},
"widgets_values": [1, 1]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [351, -45],
"size": [422.85, 164.31],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 38,
"slot_index": 0
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [19],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"beautiful landscape scenery glass bottle with a galaxy inside cute fennec fox snow HDR sunset"
]
},
{
"id": 27,
"type": "SaveImage",
"pos": [1843, -154],
"size": [466.79, 516.83],
"flags": {},
"order": 10,
"mode": 2,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 54
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 13,
"type": "SamplerCustom",
"pos": [800, -66],
"size": [355.2, 230],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 41,
"slot_index": 0
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 19,
"slot_index": 1
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 20
},
{
"name": "sampler",
"type": "SAMPLER",
"link": 18,
"slot_index": 3
},
{
"name": "sigmas",
"type": "SIGMAS",
"link": 49,
"slot_index": 4
},
{
"name": "latent_image",
"type": "LATENT",
"link": 23,
"slot_index": 5
}
],
"outputs": [
{
"name": "output",
"type": "LATENT",
"shape": 3,
"links": [28],
"slot_index": 0
},
{
"name": "denoised_output",
"type": "LATENT",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "SamplerCustom"
},
"widgets_values": [true, 0, "fixed", 1]
},
{
"id": 28,
"type": "MarkdownNote",
"pos": [-15, 90],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/sdturbo/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[18, 14, 0, 13, 3, "SAMPLER"],
[19, 6, 0, 13, 1, "CONDITIONING"],
[20, 7, 0, 13, 2, "CONDITIONING"],
[23, 5, 0, 13, 5, "LATENT"],
[28, 13, 0, 8, 0, "LATENT"],
[38, 20, 1, 6, 0, "CLIP"],
[39, 20, 1, 7, 0, "CLIP"],
[40, 20, 2, 8, 1, "VAE"],
[41, 20, 0, 13, 0, "MODEL"],
[45, 20, 0, 22, 0, "MODEL"],
[49, 22, 0, 13, 4, "SIGMAS"],
[53, 8, 0, 25, 0, "IMAGE"],
[54, 8, 0, 27, 0, "IMAGE"]
],
"groups": [
{
"id": 1,
"title": "Unmute (CTRL-M) if you want to save images.",
"bounding": [1815, -255, 536, 676],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 1.02,
"offset": [311.24, 325.56]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_turbo_1.0_fp16.safetensors",
"url": "https://huggingface.co/stabilityai/sdxl-turbo/resolve/main/sd_xl_turbo_1.0_fp16.safetensors",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,302 @@
{
"last_node_id": 18,
"last_link_id": 26,
"nodes": [
{
"id": 3,
"type": "KSampler",
"pos": [864, 96],
"size": [315, 262],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 18
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 12,
"slot_index": 3
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [13],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
840755638734093,
"randomize",
50,
4.98,
"dpmpp_3m_sde_gpu",
"exponential",
1
]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [0, 240],
"size": [336, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [18],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [14],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["stable_audio_open_1.0.safetensors"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [384, 96],
"size": [432, 144],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 25
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["heaven church electronic dance music"],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [384, 288],
"size": [432, 144],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 26
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [""],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 10,
"type": "CLIPLoader",
"pos": [0, 96],
"size": [335.65, 82],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [25, 26],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPLoader"
},
"widgets_values": ["t5_base.safetensors", "stable_audio", "default"]
},
{
"id": 11,
"type": "EmptyLatentAudio",
"pos": [576, 480],
"size": [240, 82],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [12]
}
],
"properties": {
"Node name for S&R": "EmptyLatentAudio"
},
"widgets_values": [47.6, 1]
},
{
"id": 12,
"type": "VAEDecodeAudio",
"pos": [1200, 96],
"size": [210, 46],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 13
},
{
"name": "vae",
"type": "VAE",
"link": 14,
"slot_index": 1
}
],
"outputs": [
{
"name": "AUDIO",
"type": "AUDIO",
"shape": 3,
"links": [15],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecodeAudio"
},
"widgets_values": []
},
{
"id": 13,
"type": "SaveAudio",
"pos": [1440, 96],
"size": [355.22, 100],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "audio",
"type": "AUDIO",
"link": 15
}
],
"outputs": [],
"properties": {
"Node name for S&R": "SaveAudio"
},
"widgets_values": ["audio/ComfyUI", ""]
},
{
"id": 18,
"type": "MarkdownNote",
"pos": [15, 390],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/audio/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[12, 11, 0, 3, 3, "LATENT"],
[13, 3, 0, 12, 0, "LATENT"],
[14, 4, 2, 12, 1, "VAE"],
[15, 12, 0, 13, 0, "AUDIO"],
[18, 4, 0, 3, 0, "MODEL"],
[25, 10, 0, 6, 0, "CLIP"],
[26, 10, 0, 7, 0, "CLIP"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 1,
"offset": [201.78, 380.0]
}
},
"version": 0.4,
"models": [
{
"name": "t5_base.safetensors",
"url": "https://huggingface.co/google-t5/t5-base/resolve/main/model.safetensors",
"directory": "clip"
},
{
"name": "stable_audio_open_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-audio-open-1.0/resolve/main/model.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,273 @@
{
"last_node_id": 27,
"last_link_id": 55,
"nodes": [
{
"id": 8,
"type": "VAEDecode",
"pos": [1207.8, 375.7],
"size": [210, 46],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 26
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [49],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [868, 376],
"size": [315, 262],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 42
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 53
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 54
},
{
"name": "latent_image",
"type": "LATENT",
"link": 55
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
237514639057560,
"fixed",
20,
5,
"euler",
"sgm_uniform",
1
]
},
{
"id": 25,
"type": "SaveImage",
"pos": [1459, 378],
"size": [262.29, 308.65],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 49
}
],
"outputs": [],
"properties": {},
"widgets_values": ["3d/ComfyUI"]
},
{
"id": 23,
"type": "LoadImage",
"pos": [175, 438],
"size": [316.52, 405.71],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [51],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["hypernetwork_example_output.png", "image"]
},
{
"id": 26,
"type": "StableZero123_Conditioning",
"pos": [514, 394],
"size": [315, 194],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 50
},
{
"name": "init_image",
"type": "IMAGE",
"link": 51
},
{
"name": "vae",
"type": "VAE",
"link": 52
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"shape": 3,
"links": [53],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"shape": 3,
"links": [54],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"shape": 3,
"links": [55],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "StableZero123_Conditioning"
},
"widgets_values": [256, 256, 1, 10, 142]
},
{
"id": 15,
"type": "ImageOnlyCheckpointLoader",
"pos": [89, 290],
"size": [369.6, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [42],
"slot_index": 0
},
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [50],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [26, 52],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "ImageOnlyCheckpointLoader"
},
"widgets_values": ["stable_zero123.ckpt"]
},
{
"id": 27,
"type": "MarkdownNote",
"pos": [-75, 450],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/3d/)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[26, 15, 2, 8, 1, "VAE"],
[42, 15, 0, 3, 0, "MODEL"],
[49, 8, 0, 25, 0, "IMAGE"],
[50, 15, 1, 26, 0, "CLIP_VISION"],
[51, 23, 0, 26, 1, "IMAGE"],
[52, 15, 2, 26, 2, "VAE"],
[53, 26, 0, 3, 1, "CONDITIONING"],
[54, 26, 1, 3, 2, "CONDITIONING"],
[55, 26, 2, 3, 3, "LATENT"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.75,
"offset": [439.73, 40.67]
}
},
"version": 0.4,
"models": [
{
"name": "stable_zero123.ckpt",
"url": "https://huggingface.co/stabilityai/stable-zero123/resolve/main/stable_zero123.ckpt",
"directory": "checkpoints"
}
]
}

View File

@@ -0,0 +1,535 @@
{
"last_node_id": 23,
"last_link_id": 40,
"nodes": [
{
"id": 3,
"type": "KSampler",
"pos": [1843.74, 476.56],
"size": [315, 262],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 39
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 40
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 17
},
{
"name": "latent_image",
"type": "LATENT",
"link": 18
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
237514639057514,
"randomize",
20,
2.5,
"euler",
"karras",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [2183.74, 476.56],
"size": [210, 46],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 7
},
{
"name": "vae",
"type": "VAE",
"link": 26
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [10],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 10,
"type": "SaveAnimatedWEBP",
"pos": [1654, 829],
"size": [741.67, 564.59],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 10
}
],
"outputs": [],
"properties": {
"Node name for S&R": "SaveAnimatedWEBP"
},
"widgets_values": ["ComfyUI", 10, false, 85, "default"]
},
{
"id": 12,
"type": "SVD_img2vid_Conditioning",
"pos": [1463.74, 496.56],
"size": [315, 218],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip_vision",
"type": "CLIP_VISION",
"link": 24
},
{
"name": "init_image",
"type": "IMAGE",
"link": 35,
"slot_index": 1
},
{
"name": "vae",
"type": "VAE",
"link": 25
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"shape": 3,
"links": [40],
"slot_index": 0
},
{
"name": "negative",
"type": "CONDITIONING",
"shape": 3,
"links": [17],
"slot_index": 1
},
{
"name": "latent",
"type": "LATENT",
"shape": 3,
"links": [18],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "SVD_img2vid_Conditioning"
},
"widgets_values": [1024, 576, 25, 127, 6, 0]
},
{
"id": 14,
"type": "VideoLinearCFGGuidance",
"pos": [1463.74, 366.56],
"size": [315, 58],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 23
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [39],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VideoLinearCFGGuidance"
},
"widgets_values": [1]
},
{
"id": 15,
"type": "ImageOnlyCheckpointLoader",
"pos": [1050, 320],
"size": [369.6, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [23],
"slot_index": 0
},
{
"name": "CLIP_VISION",
"type": "CLIP_VISION",
"shape": 3,
"links": [24],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [25, 26],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "ImageOnlyCheckpointLoader"
},
"widgets_values": ["svd_xt.safetensors"]
},
{
"id": 16,
"type": "CheckpointLoaderSimple",
"pos": [0, 510],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"shape": 3,
"links": [28],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"shape": 3,
"links": [29, 31],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"shape": 3,
"links": [34]
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["sd_xl_base_1.0.safetensors"]
},
{
"id": 17,
"type": "KSampler",
"pos": [802.4, 566.4],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 28
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 30
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 32
},
{
"name": "latent_image",
"type": "LATENT",
"link": 37,
"slot_index": 3
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [33],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
144698910769133,
"randomize",
15,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 18,
"type": "CLIPTextEncode",
"pos": [342.4, 516.4],
"size": [390, 130],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 29
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [30],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"photograph beautiful scenery nature mountains alps river rapids snow sky cumulus clouds"
]
},
{
"id": 19,
"type": "CLIPTextEncode",
"pos": [342.4, 696.4],
"size": [390, 130],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 31
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"shape": 3,
"links": [32],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["text, watermark"]
},
{
"id": 20,
"type": "VAEDecode",
"pos": [1172.4, 566.4],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 33
},
{
"name": "vae",
"type": "VAE",
"link": 34,
"slot_index": 1
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [35, 36],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 21,
"type": "PreviewImage",
"pos": [1152.4, 656.4],
"size": [275.95, 246],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 36
}
],
"outputs": [],
"properties": {
"Node name for S&R": "PreviewImage"
},
"widgets_values": []
},
{
"id": 22,
"type": "EmptyLatentImage",
"pos": [422.4, 866.4],
"size": [310, 110],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"shape": 3,
"links": [37]
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [1024, 576, 1]
},
{
"id": 23,
"type": "MarkdownNote",
"pos": [0, 660],
"size": [225, 60],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/video/#image-to-video)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[7, 3, 0, 8, 0, "LATENT"],
[10, 8, 0, 10, 0, "IMAGE"],
[17, 12, 1, 3, 2, "CONDITIONING"],
[18, 12, 2, 3, 3, "LATENT"],
[23, 15, 0, 14, 0, "MODEL"],
[24, 15, 1, 12, 0, "CLIP_VISION"],
[25, 15, 2, 12, 2, "VAE"],
[26, 15, 2, 8, 1, "VAE"],
[28, 16, 0, 17, 0, "MODEL"],
[29, 16, 1, 18, 0, "CLIP"],
[30, 18, 0, 17, 1, "CONDITIONING"],
[31, 16, 1, 19, 0, "CLIP"],
[32, 19, 0, 17, 2, "CONDITIONING"],
[33, 17, 0, 20, 0, "LATENT"],
[34, 16, 2, 20, 1, "VAE"],
[35, 20, 0, 12, 1, "IMAGE"],
[36, 20, 0, 21, 0, "IMAGE"],
[37, 22, 0, 17, 3, "LATENT"],
[39, 14, 0, 3, 0, "MODEL"],
[40, 12, 0, 3, 1, "CONDITIONING"]
],
"groups": [
{
"id": 1,
"title": "Image to Video",
"bounding": [1455, 300, 954, 478],
"color": "#8A8",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Text to Image",
"bounding": [330, 435, 1106, 544],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 1.13,
"offset": [502.97, -29.59]
}
},
"version": 0.4,
"models": [
{
"name": "sd_xl_base_1.0.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/resolve/main/sd_xl_base_1.0.safetensors?download=true",
"directory": "checkpoints"
},
{
"name": "svd_xt.safetensors",
"url": "https://huggingface.co/stabilityai/stable-video-diffusion-img2vid-xt/resolve/main/svd_xt.safetensors?download=true",
"directory": "checkpoints"
}
]
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,360 @@
{
"last_node_id": 31,
"last_link_id": 87,
"nodes": [
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [432, 158],
"size": [422.85, 164.31],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 81
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": [
"outdoors in the yosemite national park mountains nature\n\n\n\n"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [434, 371],
"size": [425.28, 180.61],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 82
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "CLIPTextEncode"
},
"widgets_values": ["watermark, text\n"]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1422, 387],
"size": [210, 46],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 42
},
{
"name": "vae",
"type": "VAE",
"link": 83
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [22],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEDecode"
},
"widgets_values": []
},
{
"id": 3,
"type": "KSampler",
"pos": [940, 180],
"size": [315, 262],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 80
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 4
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 6
},
{
"name": "latent_image",
"type": "LATENT",
"link": 72
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [42],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [
152545289528694,
"randomize",
20,
8,
"uni_pc_bh2",
"normal",
1
]
},
{
"id": 29,
"type": "CheckpointLoaderSimple",
"pos": [17, 303],
"size": [315, 98],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [80],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [81, 82],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [83, 84],
"slot_index": 2
}
],
"properties": {
"Node name for S&R": "CheckpointLoaderSimple"
},
"widgets_values": ["512-inpainting-ema.safetensors"]
},
{
"id": 20,
"type": "LoadImage",
"pos": [-107, 726],
"size": [344, 346],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [85],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"links": [],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": ["yosemite_outpaint_example.png", "image"]
},
{
"id": 30,
"type": "ImagePadForOutpaint",
"pos": [269, 727],
"size": [315, 174],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{
"name": "image",
"type": "IMAGE",
"link": 85
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"shape": 3,
"links": [87],
"slot_index": 0
},
{
"name": "MASK",
"type": "MASK",
"shape": 3,
"links": [86],
"slot_index": 1
}
],
"properties": {
"Node name for S&R": "ImagePadForOutpaint"
},
"widgets_values": [0, 128, 0, 128, 40]
},
{
"id": 9,
"type": "SaveImage",
"pos": [1671, 384],
"size": [360.55, 441.53],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 22
}
],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 26,
"type": "VAEEncodeForInpaint",
"pos": [617, 720],
"size": [226.8, 98],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [
{
"name": "pixels",
"type": "IMAGE",
"link": 87
},
{
"name": "vae",
"type": "VAE",
"link": 84
},
{
"name": "mask",
"type": "MASK",
"link": 86
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [72],
"slot_index": 0
}
],
"properties": {
"Node name for S&R": "VAEEncodeForInpaint"
},
"widgets_values": [8]
},
{
"id": 31,
"type": "MarkdownNote",
"pos": [15, 465],
"size": [225, 60],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"\ud83d\udec8 [Learn more about this workflow](https://comfyanonymous.github.io/ComfyUI_examples/inpaint/#outpainting)"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[4, 6, 0, 3, 1, "CONDITIONING"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[22, 8, 0, 9, 0, "IMAGE"],
[42, 3, 0, 8, 0, "LATENT"],
[72, 26, 0, 3, 3, "LATENT"],
[80, 29, 0, 3, 0, "MODEL"],
[81, 29, 1, 6, 0, "CLIP"],
[82, 29, 1, 7, 0, "CLIP"],
[83, 29, 2, 8, 1, "VAE"],
[84, 29, 2, 26, 1, "VAE"],
[85, 20, 0, 30, 0, "IMAGE"],
[86, 30, 1, 26, 2, "MASK"],
[87, 30, 0, 26, 0, "IMAGE"]
],
"groups": [
{
"id": 1,
"title": "Load image and pad for outpainting",
"bounding": [-120, 600, 1038, 509],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.93,
"offset": [359.29, 119.05]
}
},
"version": 0.4,
"models": [
{
"name": "512-inpainting-ema.safetensors",
"url": "https://huggingface.co/stabilityai/stable-diffusion-2-inpainting/resolve/main/512-inpainting-ema.safetensors?download=true",
"directory": "checkpoints"
}
]
}

View File

@@ -1,26 +1,34 @@
<template>
<Card :data-testid="`template-workflow-${props.workflowName}`">
<Card :data-testid="`template-workflow-${template.name}`" class="w-64">
<template #header>
<div class="flex items-center justify-center">
<div
class="relative overflow-hidden rounded-t-lg cursor-pointer w-64 h-64"
>
<img
v-if="!imageError"
:src="
props.moduleName === 'default'
? `templates/${props.workflowName}.jpg`
: `api/workflow_templates/${props.moduleName}/${props.workflowName}.jpg`
"
@error="imageError = true"
class="w-64 h-64 rounded-t-lg object-cover thumbnail"
/>
<div v-else class="w-64 h-64 content-center text-center">
<i class="pi pi-file" style="font-size: 4rem"></i>
</div>
<a>
<div class="relative overflow-hidden rounded-t-lg cursor-pointer">
<template v-if="template.mediaType === 'audio'">
<div class="w-64 h-64 flex items-center justify-center p-4 z-20">
<audio
controls
class="w-full relative z-20"
:src="thumbnailSrc"
@error="imageError = true"
@click.stop
/>
</div>
</template>
<template v-else>
<img
v-if="!imageError"
:src="thumbnailSrc"
:alt="title"
class="w-64 h-64 rounded-t-lg object-cover thumbnail"
@error="imageError = true"
/>
<div v-else class="w-64 h-64 content-center text-center">
<i class="pi pi-file" style="font-size: 4rem"></i>
</div>
</template>
<a @click="$emit('loadWorkflow', template.name)">
<div
class="absolute top-0 left-0 w-64 h-64 overflow-hidden opacity-0 transition duration-300 ease-in-out hover:opacity-100 bg-opacity-50 bg-black flex items-center justify-center"
class="absolute top-0 left-0 w-64 h-64 overflow-hidden opacity-0 transition duration-300 ease-in-out hover:opacity-100 bg-opacity-50 bg-black flex items-center justify-center z-10"
>
<i class="pi pi-play-circle" style="color: white"></i>
</div>
@@ -33,18 +41,9 @@
</div>
</template>
<template #subtitle>
<!--Default templates have translations-->
<template v-if="props.moduleName === 'default'">
{{
$t(
`templateWorkflows.template.${props.workflowName}`,
props.workflowName
)
}}
</template>
<template v-else>
{{ props.workflowName }}
</template>
<div class="text-center">
{{ title }}
</div>
</template>
</Card>
</template>
@@ -52,15 +51,39 @@
<script setup lang="ts">
import Card from 'primevue/card'
import ProgressSpinner from 'primevue/progressspinner'
import { ref } from 'vue'
import { computed, ref } from 'vue'
import { useI18n } from 'vue-i18n'
const props = defineProps<{
moduleName: string
workflowName: string
import { TemplateInfo } from '@/types/workflowTemplateTypes'
const { sourceModule, categoryTitle, loading, template } = defineProps<{
sourceModule: string
categoryTitle: string
loading: boolean
template: TemplateInfo
}>()
const { t } = useI18n()
const imageError = ref(false)
const thumbnailSrc = computed(() =>
sourceModule === 'default'
? `/templates/${template.name}.${template.mediaSubtype}`
: `/api/workflow_templates/${sourceModule}/${template.name}.${template.mediaSubtype}`
)
const title = computed(() => {
return sourceModule === 'default'
? t(
`templateWorkflows.template.${categoryTitle}.${template.name}`,
template.name
)
: template.name ?? `${sourceModule} Template`
})
defineEmits<{
loadWorkflow: [name: string]
}>()
</script>
<style lang="css" scoped>
@@ -68,8 +91,4 @@ const imageError = ref(false)
--p-card-body-padding: 10px 0 0 0;
overflow: hidden;
}
:deep(.p-card-subtitle) {
text-align: center;
}
</style>

View File

@@ -1,5 +1,8 @@
<template>
<div class="flex h-96" data-testid="template-workflows-content">
<div
class="flex h-96 overflow-y-hidden"
data-testid="template-workflows-content"
>
<div class="relative">
<ProgressSpinner
v-if="!workflowTemplatesStore.isLoaded"
@@ -9,7 +12,9 @@
:model-value="selectedTab"
@update:model-value="handleTabSelection"
:options="tabs"
optionLabel="title"
option-group-label="label"
option-label="title"
option-group-children="modules"
scroll-height="auto"
class="overflow-y-auto w-64 h-full"
listStyle="max-height:unset"
@@ -19,16 +24,18 @@
class="carousel justify-center"
:value="selectedTab.templates"
:responsive-options="responsiveOptions"
:numVisible="4"
:numScroll="3"
:key="selectedTab.moduleName"
:num-visible="4"
:num-scroll="3"
:key="`${selectedTab.moduleName}${selectedTab.title}`"
>
<template #item="slotProps">
<div @click="loadWorkflow(slotProps.data)" class="p-2">
<div class="p-2 justify-items-center">
<TemplateWorkflowCard
:moduleName="selectedTab.moduleName"
:workflowName="slotProps.data"
:loading="slotProps.data === workflowLoading"
:sourceModule="selectedTab.moduleName"
:template="slotProps.data"
:loading="slotProps.data.name === workflowLoading"
:categoryTitle="selectedTab.title"
@loadWorkflow="loadWorkflow"
/>
</div>
</template>
@@ -48,22 +55,10 @@ import { api } from '@/scripts/api'
import { app } from '@/scripts/app'
import { useDialogStore } from '@/stores/dialogStore'
import { useWorkflowTemplatesStore } from '@/stores/workflowTemplatesStore'
interface WorkflowTemplatesTab {
moduleName: string
title: string
templates: string[]
}
import type { WorkflowTemplates } from '@/types/workflowTemplateTypes'
const { t } = useI18n()
//These default templates are provided by the frontend
const comfyUITemplates: WorkflowTemplatesTab = {
moduleName: 'default',
title: 'ComfyUI',
templates: ['default', 'image2image', 'upscale', 'flux_schnell']
}
const responsiveOptions = ref([
{
breakpoint: '1660px',
@@ -83,25 +78,18 @@ const responsiveOptions = ref([
])
const workflowTemplatesStore = useWorkflowTemplatesStore()
const selectedTab = ref<WorkflowTemplatesTab>(comfyUITemplates)
const selectedTab = ref<WorkflowTemplates | null>(
workflowTemplatesStore?.defaultTemplate
)
const workflowLoading = ref<string | null>(null)
const tabs = computed<WorkflowTemplatesTab[]>(() => {
return [
comfyUITemplates,
...Object.entries(workflowTemplatesStore.items).map(([key, value]) => ({
moduleName: key,
title: key,
templates: value
}))
]
})
const tabs = computed(() => workflowTemplatesStore.groupedTemplates)
onMounted(async () => {
await workflowTemplatesStore.loadWorkflowTemplates()
})
const handleTabSelection = (selection: WorkflowTemplatesTab | null) => {
const handleTabSelection = (selection: WorkflowTemplates | null) => {
//Listbox allows deselecting so this special case is ignored here
if (selection !== selectedTab.value && selection !== null)
selectedTab.value = selection

View File

@@ -0,0 +1,384 @@
/**
 * Built-in workflow template categories bundled with the frontend.
 *
 * Each entry is one category tab in the template browser:
 * - `moduleName`: always `'default'` here; the card component builds the
 *   thumbnail URL as `/templates/<name>.<mediaSubtype>` for this module
 *   (custom-node modules go through `/api/workflow_templates/...` instead).
 * - `title`: the category tab label; it is also the middle segment of the
 *   i18n key `templateWorkflows.template.<title>.<name>`, so it must match
 *   the keys in the locale files exactly.
 * - `type`: the dominant media kind of the category (informational).
 * - `templates[].name`: base filename shared by the workflow JSON and its
 *   thumbnail asset — renaming an entry requires renaming those files too.
 * - `templates[].mediaType` / `mediaSubtype`: how the card renders the
 *   thumbnail ('audio' gets an <audio> element; everything else an <img>).
 * - `templates[].tutorialUrl`: external docs link for the workflow.
 */
export const CORE_TEMPLATES = [
  // Introductory workflows: txt2img, img2img, LoRA, inpaint/outpaint, etc.
  {
    moduleName: 'default',
    title: 'Basics',
    type: 'image',
    templates: [
      {
        name: 'default',
        tutorialUrl:
          'https://github.com/comfyanonymous/ComfyUI/wiki/Basic-Tutorial',
        mediaType: 'image',
        mediaSubtype: 'png'
      },
      {
        name: 'image2image',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/img2img/'
      },
      {
        name: 'lora',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl: 'https://comfyanonymous.github.io/ComfyUI_examples/lora/'
      },
      {
        name: 'inpaint_example',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/inpaint/'
      },
      {
        name: 'yosemite_outpaint_example',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/inpaint/#outpainting'
      },
      {
        // NOTE(review): 'inpain' (missing 't') matches the upstream example's
        // asset filename — do not "fix" the spelling without renaming the
        // workflow JSON and thumbnail files to match.
        name: 'inpain_model_outpainting',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/inpaint/'
      },
      {
        name: 'embedding_example',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/textual_inversion_embeddings/'
      },
      {
        name: 'gligen_textbox_example',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl: 'https://comfyanonymous.github.io/ComfyUI_examples/gligen/'
      },
      {
        name: 'lora_multiple',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl: 'https://comfyanonymous.github.io/ComfyUI_examples/lora/'
      }
    ]
  },
  // Flux model family: dev/schnell checkpoints plus fill/canny/redux/depth tools.
  {
    moduleName: 'default',
    title: 'Flux',
    type: 'image',
    templates: [
      {
        name: 'flux_dev_checkpoint_example',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/flux/#flux-dev-1'
      },
      {
        name: 'flux_schnell',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/flux/#flux-schnell-1'
      },
      {
        name: 'flux_fill_inpaint_example',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/flux/#fill-inpainting-model'
      },
      {
        name: 'flux_fill_outpaint_example',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/flux/#fill-inpainting-model'
      },
      {
        name: 'flux_canny_model_example',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/flux/#canny-and-depth'
      },
      {
        name: 'flux_redux_model_example',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/flux/#redux'
      },
      {
        name: 'flux_depth_lora_example',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/flux/#canny-and-depth'
      }
    ]
  },
  // ControlNet / T2I-adapter conditioning examples.
  {
    moduleName: 'default',
    title: 'ControlNet',
    type: 'image',
    templates: [
      {
        name: 'controlnet_example',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/controlnet/'
      },
      {
        name: '2_pass_pose_worship',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#2-pass-pose-worship'
      },
      {
        name: 'depth_controlnet',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#pose-controlnet'
      },
      {
        name: 'depth_t2i_adapter',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#t2i-adapter-vs-controlnets'
      },
      {
        name: 'mixing_controlnets',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/controlnet/#mixing-controlnets'
      }
    ]
  },
  // Upscaling: model-based (ESRGAN) and latent two-pass hires-fix variants.
  {
    moduleName: 'default',
    title: 'Upscaling',
    type: 'image',
    templates: [
      {
        name: 'upscale',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/upscale_models/'
      },
      {
        name: 'esrgan_example',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/upscale_models/'
      },
      {
        name: 'hiresfix_latent_workflow',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/'
      },
      {
        name: 'hiresfix_esrgan_workflow',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/#non-latent-upscaling'
      },
      {
        name: 'latent_upscale_different_prompt_model',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/2_pass_txt2img/#more-examples'
      }
    ]
  },
  // Video generation (SVD, LTXV, Mochi, Hunyuan). Thumbnails are animated
  // webp images, hence mediaType 'image' despite the category type 'video'.
  {
    moduleName: 'default',
    title: 'Video',
    type: 'video',
    templates: [
      {
        name: 'image_to_video',
        mediaType: 'image',
        mediaSubtype: 'webp',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/video/#image-to-video'
      },
      {
        name: 'txt_to_image_to_video',
        mediaType: 'image',
        mediaSubtype: 'webp',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/video/#image-to-video'
      },
      {
        name: 'ltxv_image_to_video',
        mediaType: 'image',
        mediaSubtype: 'webp',
        tutorialUrl: 'https://comfyanonymous.github.io/ComfyUI_examples/ltxv/'
      },
      {
        name: 'ltxv_text_to_video',
        mediaType: 'image',
        mediaSubtype: 'webp',
        tutorialUrl: 'https://comfyanonymous.github.io/ComfyUI_examples/ltxv/'
      },
      {
        name: 'mochi_text_to_video_example',
        mediaType: 'image',
        mediaSubtype: 'webp',
        tutorialUrl: 'https://comfyanonymous.github.io/ComfyUI_examples/mochi/'
      },
      {
        name: 'hunyuan_video_text_to_video',
        mediaType: 'image',
        mediaSubtype: 'webp',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/hunyuan_video/'
      }
    ]
  },
  // Stable Diffusion 3.5: base generation plus SD3.5 ControlNets.
  {
    moduleName: 'default',
    title: 'SD3.5',
    type: 'image',
    templates: [
      {
        name: 'sd3.5_simple_example',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/sd3/#sd35'
      },
      {
        name: 'sd3.5_large_canny_controlnet_example',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/sd3/#sd35-controlnets'
      },
      {
        name: 'sd3.5_large_depth',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/sd3/#sd35-controlnets'
      },
      {
        name: 'sd3.5_large_blur',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/sd3/#sd35-controlnets'
      }
    ]
  },
  // SDXL: base/refiner, ReVision (image prompting) and SDXL Turbo.
  {
    moduleName: 'default',
    title: 'SDXL',
    type: 'image',
    templates: [
      {
        name: 'sdxl_simple_example',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl: 'https://comfyanonymous.github.io/ComfyUI_examples/sdxl/'
      },
      {
        name: 'sdxl_refiner_prompt_example',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl: 'https://comfyanonymous.github.io/ComfyUI_examples/sdxl/'
      },
      {
        name: 'sdxl_revision_text_prompts',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/sdxl/#revision'
      },
      {
        name: 'sdxl_revision_zero_positive',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/sdxl/#revision'
      },
      {
        name: 'sdxlturbo_example',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/sdturbo/'
      }
    ]
  },
  // Area-composition conditioning examples.
  // NOTE(review): locale files key this category as "Comfy - Area Composition",
  // which does not match this title — confirm which side is canonical.
  {
    moduleName: 'default',
    title: 'Area Composition',
    type: 'image',
    templates: [
      {
        name: 'area_composition',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/area_composition/'
      },
      {
        name: 'area_composition_reversed',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/area_composition/'
      },
      {
        name: 'area_composition_square_area_for_subject',
        mediaType: 'image',
        mediaSubtype: 'png',
        tutorialUrl:
          'https://comfyanonymous.github.io/ComfyUI_examples/area_composition/#increasing-consistency-of-images-with-area-composition'
      }
    ]
  },
  // 3D novel-view synthesis (Stable Zero123); animated webp thumbnail.
  {
    moduleName: 'default',
    title: '3D',
    type: 'video',
    templates: [
      {
        name: 'stable_zero123_example',
        mediaType: 'image',
        mediaSubtype: 'webp',
        tutorialUrl: 'https://comfyanonymous.github.io/ComfyUI_examples/3d/'
      }
    ]
  },
  // Audio generation (Stable Audio); the card renders a flac <audio> player.
  {
    moduleName: 'default',
    title: 'Audio',
    type: 'audio',
    templates: [
      {
        name: 'stable_audio_example',
        mediaType: 'audio',
        mediaSubtype: 'flac',
        tutorialUrl: 'https://comfyanonymous.github.io/ComfyUI_examples/audio/'
      }
    ]
  }
]

View File

@@ -360,10 +360,72 @@
"templateWorkflows": {
"title": "Get Started with a Template",
"template": {
"default": "Image Generation",
"image2image": "Image to Image",
"upscale": "2 Pass Upscale",
"flux_schnell": "Flux Schnell"
"Flux": {
"flux_dev_checkpoint_example": "Flux Dev",
"flux_schnell": "Flux Schnell",
"flux_fill_inpaint_example": "Flux Inpaint",
"flux_fill_outpaint_example": "Flux Outpaint",
"flux_canny_model_example": "Flux Canny Model",
"flux_redux_model_example": "Flux Redux Model",
"flux_depth_lora_example": "Flux Depth Lora"
},
"Basics": {
"default": "Image Generation",
"image2image": "Image to Image",
"embedding_example": "Embedding",
"gligen_textbox_example": "Gligen Textbox",
"lora": "Lora",
"lora_multiple": "Lora Multiple",
"inpaint_example": "Inpaint",
"yosemite_outpaint_example": "Yosemite Outpaint",
"inpain_model_outpainting": "Inpaint Model Outpainting"
},
"ControlNet": {
"controlnet_example": "ControlNet",
"2_pass_pose_worship": "2 Pass Pose Worship",
"depth_controlnet": "Depth ControlNet",
"depth_t2i_adapter": "Depth T2I Adapter",
"mixing_controlnets": "Mixing ControlNets"
},
"Upscaling": {
"upscale": "Upscale",
"esrgan_example": "ESRGAN",
"hiresfix_latent_workflow": "HiresFix Latent Workflow",
"hiresfix_esrgan_workflow": "HiresFix ESRGAN Workflow",
"latent_upscale_different_prompt_model": "Latent Upscale Different Prompt Model"
},
"Video": {
"image_to_video": "Image to Video",
"txt_to_image_to_video": "Text to Image to Video",
"ltxv_image_to_video": "LTXV Image to Video",
"ltxv_text_to_video": "LTXV Text to Video",
"mochi_text_to_video_example": "Mochi Text to Video",
"hunyuan_video_text_to_video": "Hunyuan Video Text to Video"
},
"SD3.5": {
"sd3.5_simple_example": "SD3.5 Simple",
"sd3.5_large_canny_controlnet_example": "SD3.5 Large Canny ControlNet",
"sd3.5_large_depth": "SD3.5 Large Depth",
"sd3.5_large_blur": "SD3.5 Large Blur"
},
"SDXL": {
"sdxl_simple_example": "SDXL Simple",
"sdxl_refiner_prompt_example": "SDXL Refiner Prompt",
"sdxl_revision_text_prompts": "SDXL Revision Text Prompts",
"sdxl_revision_zero_positive": "SDXL Revision Zero Positive",
"sdxlturbo_example": "SDXL Turbo"
},
      "Area Composition": {
"area_composition": "Area Composition",
"area_composition_reversed": "Area Composition Reversed",
"area_composition_square_area_for_subject": "Area Composition Square Area for Subject"
},
"3D": {
"stable_zero123_example": "Stable Zero123"
},
"Audio": {
"stable_audio_example": "Stable Audio"
}
}
},
"graphCanvasMenu": {

View File

@@ -756,10 +756,72 @@
},
"templateWorkflows": {
"template": {
"default": "Génération d'images",
"flux_schnell": "Flux Schnell",
"image2image": "Image à Image",
"upscale": "Upscale en 2 passes"
"3D": {
"stable_zero123_example": "Stable Zero123"
},
"Audio": {
"stable_audio_example": "Stable Audio"
},
"Basics": {
"default": "Génération d'Image",
"embedding_example": "Intégration",
"gligen_textbox_example": "Boîte de Texte Gligen",
"image2image": "Image à Image",
"inpain_model_outpainting": "Modèle Inpaint Outpainting",
"inpaint_example": "Inpaint",
"lora": "Lora",
"lora_multiple": "Lora Multiple",
"yosemite_outpaint_example": "Yosemite Outpaint"
},
      "Area Composition": {
"area_composition": "Composition de Zone",
"area_composition_reversed": "Composition de Zone Inversée",
"area_composition_square_area_for_subject": "Composition de Zone Carrée pour le Sujet"
},
"ControlNet": {
"2_pass_pose_worship": "2 Passes Pose Worship",
"controlnet_example": "ControlNet",
"depth_controlnet": "Profondeur ControlNet",
"depth_t2i_adapter": "Adaptateur Profondeur T2I",
"mixing_controlnets": "Mélange de ControlNets"
},
"Flux": {
"flux_canny_model_example": "Flux Canny Model",
"flux_depth_lora_example": "Flux Depth Lora",
        "flux_dev_checkpoint_example": "Flux Dev",
"flux_fill_inpaint_example": "Flux Inpaint",
"flux_fill_outpaint_example": "Flux Outpaint",
"flux_redux_model_example": "Flux Redux Model",
"flux_schnell": "Flux Schnell"
},
"SD3.5": {
"sd3.5_large_blur": "SD3.5 Grand Flou",
"sd3.5_large_canny_controlnet_example": "SD3.5 Grand Canny ControlNet",
"sd3.5_large_depth": "SD3.5 Grande Profondeur",
"sd3.5_simple_example": "SD3.5 Simple"
},
"SDXL": {
"sdxl_refiner_prompt_example": "SDXL Refiner Prompt",
"sdxl_revision_text_prompts": "Révisions de Texte SDXL",
"sdxl_revision_zero_positive": "Révision Zéro Positive SDXL",
"sdxl_simple_example": "SDXL Simple",
"sdxlturbo_example": "SDXL Turbo"
},
"Upscaling": {
"esrgan_example": "ESRGAN",
"hiresfix_esrgan_workflow": "Flux de Travail ESRGAN HiresFix",
"hiresfix_latent_workflow": "Flux de Travail Latent HiresFix",
"latent_upscale_different_prompt_model": "Modèle d'Agrandissement Latent Différent Prompt",
"upscale": "Agrandissement"
},
"Video": {
"hunyuan_video_text_to_video": "Texte à Vidéo Hunyuan",
"image_to_video": "Image à Vidéo",
"ltxv_image_to_video": "LTXV Image à Vidéo",
"ltxv_text_to_video": "LTXV Texte à Vidéo",
"mochi_text_to_video_example": "Exemple de Texte à Vidéo Mochi",
"txt_to_image_to_video": "Texte à Image à Vidéo"
}
},
"title": "Commencez avec un modèle"
},

View File

@@ -756,10 +756,72 @@
},
"templateWorkflows": {
"template": {
"default": "画像生成",
"flux_schnell": "Flux Schnell",
"image2image": "画像から画像へ",
"upscale": "2パスアップスケール"
"3D": {
"stable_zero123_example": "Stable Zero123"
},
"Audio": {
"stable_audio_example": "Stable Audio"
},
"Basics": {
"default": "画像生成",
"embedding_example": "埋め込み",
"gligen_textbox_example": "Gligenテキストボックス",
"image2image": "画像から画像へ",
"inpain_model_outpainting": "InpaintモデルのOutpainting",
"inpaint_example": "Inpaint",
"lora": "Lora",
"lora_multiple": "Lora複数",
"yosemite_outpaint_example": "Yosemite Outpaint"
},
"Comfy - Area Composition": {
"area_composition": "エリア構成",
"area_composition_reversed": "エリア構成反転",
"area_composition_square_area_for_subject": "主題のためのエリア構成スクエアエリア"
},
"ControlNet": {
"2_pass_pose_worship": "2 Pass Pose Worship",
"controlnet_example": "ControlNet",
"depth_controlnet": "Depth ControlNet",
"depth_t2i_adapter": "Depth T2Iアダプタ",
"mixing_controlnets": "ControlNetsの混合"
},
"Flux": {
"flux_canny_model_example": "Flux Cannyモデル",
"flux_depth_lora_example": "Flux Depth Lora",
"flux_dev_example": "Flux Dev",
"flux_fill_inpaint_example": "Flux Inpaint",
"flux_fill_outpaint_example": "Flux Outpaint",
"flux_redux_model_example": "Flux Reduxモデル",
"flux_schnell": "Flux Schnell"
},
"SD3.5": {
"sd3.5_large_blur": "SD3.5 Large Blur",
"sd3.5_large_canny_controlnet_example": "SD3.5 Large Canny ControlNet",
"sd3.5_large_depth": "SD3.5 Large Depth",
"sd3.5_simple_example": "SD3.5シンプル"
},
"SDXL": {
"sdxl_refiner_prompt_example": "SDXL Refinerプロンプト",
"sdxl_revision_text_prompts": "SDXL Revisionテキストプロンプト",
"sdxl_revision_zero_positive": "SDXL Revisionゼロポジティブ",
"sdxl_simple_example": "SDXLシンプル",
"sdxlturbo_example": "SDXLターボ"
},
"Upscaling": {
"esrgan_example": "ESRGAN",
"hiresfix_esrgan_workflow": "HiresFix ESRGANワークフロー",
"hiresfix_latent_workflow": "HiresFix Latentワークフロー",
"latent_upscale_different_prompt_model": "Latent Upscale異なるプロンプトモデル",
"upscale": "アップスケール"
},
"Video": {
"hunyuan_video_text_to_video": "Hunyuanビデオテキストからビデオへ",
"image_to_video": "画像からビデオへ",
"ltxv_image_to_video": "LTXV画像からビデオへ",
"ltxv_text_to_video": "LTXVテキストからビデオへ",
"mochi_text_to_video_example": "Mochiテキストからビデオへ",
"txt_to_image_to_video": "テキストから画像へ、画像からビデオへ"
}
},
"title": "テンプレートを利用して開始"
},

View File

@@ -756,10 +756,72 @@
},
"templateWorkflows": {
"template": {
"default": "이미지 생성",
"flux_schnell": "FLUX Schenll",
"image2image": "이미지에서 이미지로",
"upscale": "2단계 업스케일"
"3D": {
"stable_zero123_example": "Stable Zero123"
},
"Audio": {
"stable_audio_example": "Stable Audio"
},
"Basics": {
"default": "이미지 생성",
"embedding_example": "임베딩",
"gligen_textbox_example": "Gligen 텍스트박스",
"image2image": "이미지에서 이미지로",
"inpain_model_outpainting": "Inpaint 모델 Outpainting",
"inpaint_example": "Inpaint",
"lora": "Lora",
"lora_multiple": "Lora 다중",
"yosemite_outpaint_example": "Yosemite Outpaint"
},
"Comfy - Area Composition": {
"area_composition": "영역 구성",
"area_composition_reversed": "영역 구성 반전",
"area_composition_square_area_for_subject": "주제에 대한 영역 구성 사각형"
},
"ControlNet": {
"2_pass_pose_worship": "2 패스 포즈 워십",
"controlnet_example": "ControlNet",
"depth_controlnet": "Depth ControlNet",
"depth_t2i_adapter": "Depth T2I 어댑터",
"mixing_controlnets": "ControlNets 혼합"
},
"Flux": {
"flux_canny_model_example": "Flux Canny 모델",
"flux_depth_lora_example": "Flux Depth Lora",
"flux_dev_example": "Flux 개발",
"flux_fill_inpaint_example": "Flux Inpaint",
"flux_fill_outpaint_example": "Flux Outpaint",
"flux_redux_model_example": "Flux Redux 모델",
"flux_schnell": "Flux Schnell"
},
"SD3.5": {
"sd3.5_large_blur": "SD3.5 큰 Blur",
"sd3.5_large_canny_controlnet_example": "SD3.5 큰 Canny ControlNet",
"sd3.5_large_depth": "SD3.5 큰 Depth",
"sd3.5_simple_example": "SD3.5 간단한 예"
},
"SDXL": {
"sdxl_refiner_prompt_example": "SDXL Refiner 프롬프트",
"sdxl_revision_text_prompts": "SDXL Revision 텍스트 프롬프트",
"sdxl_revision_zero_positive": "SDXL Revision Zero Positive",
"sdxl_simple_example": "SDXL 간단한 예",
"sdxlturbo_example": "SDXL Turbo"
},
"Upscaling": {
"esrgan_example": "ESRGAN",
"hiresfix_esrgan_workflow": "HiresFix ESRGAN 워크플로우",
"hiresfix_latent_workflow": "HiresFix Latent 워크플로우",
"latent_upscale_different_prompt_model": "Latent Upscale 다른 프롬프트 모델",
"upscale": "업스케일"
},
"Video": {
"hunyuan_video_text_to_video": "Hunyuan 비디오 텍스트에서 비디오로",
"image_to_video": "이미지에서 비디오로",
"ltxv_image_to_video": "LTXV 이미지에서 비디오로",
"ltxv_text_to_video": "LTXV 텍스트에서 비디오로",
"mochi_text_to_video_example": "Mochi 텍스트에서 비디오로",
"txt_to_image_to_video": "텍스트에서 이미지로, 그리고 비디오로"
}
},
"title": "템플릿으로 시작하기"
},

View File

@@ -756,10 +756,72 @@
},
"templateWorkflows": {
"template": {
"default": "Генерация изображений",
"flux_schnell": "Flux Schnell",
"image2image": "Изображение в изображение",
"upscale": "2-этапный апскейл"
"3D": {
"stable_zero123_example": "Stable Zero123"
},
"Audio": {
"stable_audio_example": "Stable Audio"
},
"Basics": {
"default": "Генерация изображений",
"embedding_example": "Встраивание",
"gligen_textbox_example": "Gligen Textbox",
"image2image": "Изображение в изображение",
"inpain_model_outpainting": "Inpaint Model Outpainting",
"inpaint_example": "Inpaint",
"lora": "Lora",
"lora_multiple": "Lora Multiple",
"yosemite_outpaint_example": "Yosemite Outpaint"
},
"Comfy - Area Composition": {
"area_composition": "Композиция области",
"area_composition_reversed": "Обратная композиция области",
"area_composition_square_area_for_subject": "Композиция области квадратной области для субъекта"
},
"ControlNet": {
"2_pass_pose_worship": "2 Pass Pose Worship",
"controlnet_example": "ControlNet",
"depth_controlnet": "Depth ControlNet",
"depth_t2i_adapter": "Depth T2I Adapter",
"mixing_controlnets": "Mixing ControlNets"
},
"Flux": {
"flux_canny_model_example": "Flux Canny Model",
"flux_depth_lora_example": "Flux Depth Lora",
"flux_dev_example": "Flux Dev",
"flux_fill_inpaint_example": "Flux Inpaint",
"flux_fill_outpaint_example": "Flux Outpaint",
"flux_redux_model_example": "Flux Redux Model",
"flux_schnell": "Flux Schnell"
},
"SD3.5": {
"sd3.5_large_blur": "SD3.5 Large Blur",
"sd3.5_large_canny_controlnet_example": "SD3.5 Large Canny ControlNet",
"sd3.5_large_depth": "SD3.5 Large Depth",
"sd3.5_simple_example": "SD3.5 Simple"
},
"SDXL": {
"sdxl_refiner_prompt_example": "SDXL Refiner Prompt",
"sdxl_revision_text_prompts": "SDXL Revision Text Prompts",
"sdxl_revision_zero_positive": "SDXL Revision Zero Positive",
"sdxl_simple_example": "SDXL Simple",
"sdxlturbo_example": "SDXL Turbo"
},
"Upscaling": {
"esrgan_example": "ESRGAN",
"hiresfix_esrgan_workflow": "HiresFix ESRGAN Workflow",
"hiresfix_latent_workflow": "HiresFix Latent Workflow",
"latent_upscale_different_prompt_model": "Latent Upscale Different Prompt Model",
"upscale": "Увеличение"
},
"Video": {
"hunyuan_video_text_to_video": "Hunyuan Video Text to Video",
"image_to_video": "Изображение в видео",
"ltxv_image_to_video": "LTXV Image to Video",
"ltxv_text_to_video": "LTXV Text to Video",
"mochi_text_to_video_example": "Mochi Text to Video",
"txt_to_image_to_video": "Текст в изображение в видео"
}
},
"title": "Начните с шаблона"
},

View File

@@ -756,10 +756,72 @@
},
"templateWorkflows": {
"template": {
"default": "默认",
"flux_schnell": "Flux Schnell",
"image2image": "图像到图像",
"upscale": "2步放大"
"3D": {
"stable_zero123_example": "稳定Zero123"
},
"Audio": {
"stable_audio_example": "稳定音频"
},
"Basics": {
"default": "图像生成",
"embedding_example": "嵌入",
"gligen_textbox_example": "Gligen文本框",
"image2image": "图像到图像",
"inpain_model_outpainting": "Inpaint模型Outpainting",
"inpaint_example": "Inpaint",
"lora": "Lora",
"lora_multiple": "Lora多个",
"yosemite_outpaint_example": "优胜美地Outpaint"
},
"Comfy - Area Composition": {
"area_composition": "区域构成",
"area_composition_reversed": "区域构成反转",
"area_composition_square_area_for_subject": "主题的区域构成方形区域"
},
"ControlNet": {
"2_pass_pose_worship": "2通道姿势崇拜",
"controlnet_example": "ControlNet",
"depth_controlnet": "深度ControlNet",
"depth_t2i_adapter": "深度T2I适配器",
"mixing_controlnets": "混合ControlNets"
},
"Flux": {
"flux_canny_model_example": "Flux Canny Model",
"flux_depth_lora_example": "Flux Depth Lora",
"flux_dev_example": "Flux Dev",
"flux_fill_inpaint_example": "Flux Inpaint",
"flux_fill_outpaint_example": "Flux Outpaint",
"flux_redux_model_example": "Flux Redux Model",
"flux_schnell": "Flux Schnell"
},
"SD3.5": {
"sd3.5_large_blur": "SD3.5大模糊",
"sd3.5_large_canny_controlnet_example": "SD3.5大型Canny ControlNet",
"sd3.5_large_depth": "SD3.5大深度",
"sd3.5_simple_example": "SD3.5简单"
},
"SDXL": {
"sdxl_refiner_prompt_example": "SDXL Refiner提示",
"sdxl_revision_text_prompts": "SDXL修订文本提示",
"sdxl_revision_zero_positive": "SDXL修订零正",
"sdxl_simple_example": "SDXL简单",
"sdxlturbo_example": "SDXL Turbo"
},
"Upscaling": {
"esrgan_example": "ESRGAN",
"hiresfix_esrgan_workflow": "HiresFix ESRGAN工作流",
"hiresfix_latent_workflow": "HiresFix潜在工作流",
"latent_upscale_different_prompt_model": "潜在升级不同提示模型",
"upscale": "升级"
},
"Video": {
"hunyuan_video_text_to_video": "Hunyuan视频文本到视频",
"image_to_video": "图像到视频",
"ltxv_image_to_video": "LTXV图像到视频",
"ltxv_text_to_video": "LTXV文本到视频",
"mochi_text_to_video_example": "Mochi文本到视频",
"txt_to_image_to_video": "文本到图像到视频"
}
},
"title": "从模板开始"
},

View File

@@ -1,20 +1,48 @@
import { groupBy } from 'lodash'
import { defineStore } from 'pinia'
import { ref } from 'vue'
import { computed, ref, shallowRef } from 'vue'
import { CORE_TEMPLATES } from '@/constants/coreTemplates'
import { api } from '@/scripts/api'
import type {
TemplateGroup,
WorkflowTemplates
} from '@/types/workflowTemplateTypes'
export const useWorkflowTemplatesStore = defineStore(
'workflowTemplates',
() => {
const items = ref<{
[customNodesName: string]: string[]
}>({})
const customTemplates = shallowRef<{ [moduleName: string]: string[] }>({})
const isLoaded = ref(false)
const defaultTemplate: WorkflowTemplates = CORE_TEMPLATES[0]
const groupedTemplates = computed<TemplateGroup[]>(() => {
const allTemplates = [
...CORE_TEMPLATES,
...Object.entries(customTemplates.value).map(
([moduleName, templates]) => ({
moduleName,
title: moduleName,
templates: templates.map((name) => ({
name,
mediaType: 'image',
mediaSubtype: 'jpg'
}))
})
)
]
return Object.entries(
groupBy(allTemplates, (t) =>
t.moduleName === 'default' ? 'ComfyUI Examples' : 'Custom Nodes'
)
).map(([label, modules]) => ({ label, modules }))
})
async function loadWorkflowTemplates() {
try {
if (!isLoaded.value) {
items.value = await api.getWorkflowTemplates()
customTemplates.value = await api.getWorkflowTemplates()
isLoaded.value = true
}
} catch (error) {
@@ -23,7 +51,8 @@ export const useWorkflowTemplatesStore = defineStore(
}
return {
items,
groupedTemplates,
defaultTemplate,
isLoaded,
loadWorkflowTemplates
}

View File

@@ -0,0 +1,16 @@
export interface TemplateInfo {
name: string
tutorialUrl?: string
mediaType: string
mediaSubtype: string
}
export interface WorkflowTemplates {
moduleName: string
templates: TemplateInfo[]
title: string
}
export interface TemplateGroup {
label: string
modules: WorkflowTemplates[]
}