Remove deprecated model JSON files and add methods to retrieve samplers and schedulers from object info

Menno van Leeuwen 2025-03-20 16:01:55 +00:00
parent 41f85ae3c5
commit c1c508215e
13 changed files with 189 additions and 14049 deletions

@@ -1,88 +0,0 @@
[
{
"name": "checkpoints",
"folders": [
"/stable-diffusion/models/checkpoints",
"/data/models/Stable-diffusion",
"/stable-diffusion/output/checkpoints"
]
},
{
"name": "loras",
"folders": [
"/stable-diffusion/models/loras",
"/data/models/Lora",
"/stable-diffusion/output/loras"
]
},
{
"name": "vae",
"folders": [
"/stable-diffusion/models/vae",
"/data/models/VAE",
"/stable-diffusion/output/vae"
]
},
{
"name": "text_encoders",
"folders": [
"/stable-diffusion/models/text_encoders",
"/stable-diffusion/models/clip",
"/data/models/CLIPEncoder",
"/stable-diffusion/output/clip"
]
},
{
"name": "diffusion_models",
"folders": [
"/stable-diffusion/models/unet",
"/stable-diffusion/models/diffusion_models",
"/stable-diffusion/output/diffusion_models"
]
},
{
"name": "clip_vision",
"folders": ["/stable-diffusion/models/clip_vision"]
},
{
"name": "style_models",
"folders": ["/stable-diffusion/models/style_models"]
},
{
"name": "embeddings",
"folders": ["/stable-diffusion/models/embeddings", "/data/embeddings"]
},
{ "name": "diffusers", "folders": ["/stable-diffusion/models/diffusers"] },
{ "name": "vae_approx", "folders": ["/stable-diffusion/models/vae_approx"] },
{
"name": "controlnet",
"folders": [
"/stable-diffusion/models/controlnet",
"/stable-diffusion/models/t2i_adapter",
"/data/models/ControlNet"
]
},
{
"name": "gligen",
"folders": ["/stable-diffusion/models/gligen", "/data/models/GLIGEN"]
},
{
"name": "upscale_models",
"folders": [
"/stable-diffusion/models/upscale_models",
"/data/models/RealESRGAN",
"/data/models/ESRGAN",
"/data/models/SwinIR",
"/data/models/GFPGAN"
]
},
{
"name": "hypernetworks",
"folders": [
"/stable-diffusion/models/hypernetworks",
"/data/models/hypernetworks"
]
},
{ "name": "photomaker", "folders": ["/stable-diffusion/models/photomaker"] },
{ "name": "classifiers", "folders": ["/stable-diffusion/models/classifiers"] }
]

@@ -1,31 +0,0 @@
[
{ "name": "prefectPonyXL_v3.safetensors", "pathIndex": 1 },
{ "name": "sd-v1-5-inpainting.ckpt", "pathIndex": 1 },
{ "name": "v1-5-pruned-emaonly.ckpt", "pathIndex": 1 },
{ "name": "Semi-realism/bemypony_Semirealanime.safetensors", "pathIndex": 1 },
{ "name": "Semi-realism/duchaitenPonyXLNo_v60.safetensors", "pathIndex": 1 },
{ "name": "FLUX/flux1-dev-fp8.safetensors", "pathIndex": 1 },
{ "name": "Realism/cyberrealisticPony_v70a.safetensors", "pathIndex": 1 },
{ "name": "Realism/cyberrealisticPony_v8.safetensors", "pathIndex": 1 },
{ "name": "Realism/realvisxlV50_v50Bakedvae.safetensors", "pathIndex": 1 },
{
"name": "Anime/autismmixSDXL_autismmixConfetti.safetensors",
"pathIndex": 1
},
{ "name": "Anime/autismmixSDXL_autismmixPony.safetensors", "pathIndex": 1 },
{
"name": "Anime/ponyDiffusionV6XL_v6StartWithThisOne.safetensors",
"pathIndex": 1
},
{ "name": "Anime/prefectPonyXL_v50.safetensors", "pathIndex": 1 },
{ "name": "Anime/waiANINSFWPONYXL_v11.safetensors", "pathIndex": 1 },
{ "name": "Anime/waiANINSFWPONYXL_v130.safetensors", "pathIndex": 1 },
{ "name": "Anime/waiNSFWIllustrious_v70.safetensors", "pathIndex": 1 },
{ "name": "RDXL/rdxlAnime_sdxlPony8.safetensors", "pathIndex": 1 },
{ "name": "RDXL/rdxlPixelArt_pony2.safetensors", "pathIndex": 1 },
{ "name": "RDXL/realDream_sdxlPony12.safetensors", "pathIndex": 1 },
{ "name": "SD3.5/sd3.5_large_fp16.safetensors", "pathIndex": 1 },
{ "name": "SD3.5/sd3.5_large_fp8_scaled.safetensors", "pathIndex": 1 },
{ "name": "Babes/babesBYSTABLEYOGI_xlV2.safetensors", "pathIndex": 1 },
{ "name": "Babes/babesByStableYogi_ponyV3VAE.safetensors", "pathIndex": 1 }
]

@@ -1,101 +0,0 @@
[
{ "name": "Expressive_H-000001.safetensors", "pathIndex": 1 },
{ "name": "Hand v2.safetensors", "pathIndex": 1 },
{ "name": "LogoRedmondV2-Logo-LogoRedmAF.safetensors", "pathIndex": 1 },
{ "name": "WowifierXL-V2.safetensors", "pathIndex": 1 },
{ "name": "detailed_notrigger.safetensors", "pathIndex": 1 },
{ "name": "detailxl.safetensors", "pathIndex": 1 },
{ "name": "Citron Pony Styles/80s_Pop_PDXL.safetensors", "pathIndex": 1 },
{ "name": "Citron Pony Styles/Alola_Style_PDXL.safetensors", "pathIndex": 1 },
{ "name": "Citron Pony Styles/BoldToon.safetensors", "pathIndex": 1 },
{
"name": "Citron Pony Styles/CandyCuteStylePDXL.safetensors",
"pathIndex": 1
},
{
"name": "Citron Pony Styles/CatalystStylePDXL.safetensors",
"pathIndex": 1
},
{ "name": "Citron Pony Styles/Citron3D_PDXL.safetensors", "pathIndex": 1 },
{
"name": "Citron Pony Styles/CitronAnimeTreasure-07.safetensors",
"pathIndex": 1
},
{ "name": "Citron Pony Styles/EnergyCAT.safetensors", "pathIndex": 1 },
{ "name": "Citron Pony Styles/FlatAnimeP1.safetensors", "pathIndex": 1 },
{ "name": "Citron Pony Styles/LunarCAT_Style.safetensors", "pathIndex": 1 },
{ "name": "Citron Pony Styles/RealisticAnime.safetensors", "pathIndex": 1 },
{ "name": "Citron Pony Styles/Smooth.safetensors", "pathIndex": 1 },
{ "name": "Citron Pony Styles/Vivid.safetensors", "pathIndex": 1 },
{ "name": "Vixon's Pony Styles/Sh4rd4n1cXLP.safetensors", "pathIndex": 1 },
{ "name": "Vixon's Pony Styles/ch33s3XLP.safetensors", "pathIndex": 1 },
{ "name": "Vixon's Pony Styles/itsyelizXLP.safetensors", "pathIndex": 1 },
{ "name": "Vixon's Pony Styles/lalangheejXLP.safetensors", "pathIndex": 1 },
{ "name": "Vixon's Pony Styles/nikkileeismeXLP.safetensors", "pathIndex": 1 },
{ "name": "Vixon's Pony Styles/tomidoronXLP.safetensors", "pathIndex": 1 },
{ "name": "Characters/princess_xl_v2.safetensors", "pathIndex": 1 },
{ "name": "Characters/princess_zelda.safetensors", "pathIndex": 1 },
{
"name": "Characters/Peni Parker/32dim-MR_PeniParker-PONY.safetensors",
"pathIndex": 1
},
{
"name": "Characters/Peni Parker/PeniParkerRivals-10.safetensors",
"pathIndex": 1
},
{
"name": "Characters/Peni Parker/Peni_Parker-000007.safetensors",
"pathIndex": 1
},
{
"name": "Characters/Peni Parker/Peni_parker_marvel_rivels.safetensors",
"pathIndex": 1
},
{
"name": "Characters/Cortana/Cortana(revAnimated).safetensors",
"pathIndex": 1
},
{ "name": "Characters/Cortana/Cortana.safetensors", "pathIndex": 1 },
{ "name": "Characters/Cortana/Cortana_XL.safetensors", "pathIndex": 1 },
{ "name": "Characters/Cortana/cortana_xl_v3.safetensors", "pathIndex": 1 },
{
"name": "Characters/Widowmaker/SDXL_ow1 Windowmaker.safetensors",
"pathIndex": 1
},
{
"name": "Characters/Widowmaker/WidowmakerPonyLoRA.safetensors",
"pathIndex": 1
},
{
"name": "Characters/Widowmaker/Widowmaker_cgi.safetensors",
"pathIndex": 1
},
{ "name": "Characters/Lara Croft/ClassicLara.safetensors", "pathIndex": 1 },
{
"name": "Characters/Lara Croft/LaraCroft_character-20.safetensors",
"pathIndex": 1
},
{
"name": "Characters/Lara Croft/lara_croft_xl_v2.safetensors",
"pathIndex": 1
},
{
"name": "Characters/Samus Aran/Samus AranPonyLora.safetensors",
"pathIndex": 1
},
{ "name": "Characters/Samus Aran/samus aran.safetensors", "pathIndex": 1 },
{ "name": "Characters/Samus Aran/samus-09.safetensors", "pathIndex": 1 },
{
"name": "Characters/D.va/DVaOWXL - by KillerUwU13_AI.safetensors",
"pathIndex": 1
},
{ "name": "Characters/D.va/DVaPony.safetensors", "pathIndex": 1 },
{
"name": "Characters/Scarlett Johansson/Scarlett-v20.safetensors",
"pathIndex": 1
},
{
"name": "Characters/Scarlett Johansson/Scarlett4.safetensors",
"pathIndex": 1
}
]

@@ -1,10 +0,0 @@
[
{ "name": "RealESRGAN_x4plus.pth", "pathIndex": 1 },
{ "name": "RealESRGAN_x4plus_anime_6B.pth", "pathIndex": 1 },
{ "name": "4x-AnimeSharp.pth", "pathIndex": 2 },
{ "name": "4x-UltraSharp.pth", "pathIndex": 2 },
{ "name": "4xNMKDSuperscale_4xNMKDSuperscale.pt", "pathIndex": 2 },
{ "name": "ESRGAN_4x.pth", "pathIndex": 2 },
{ "name": "SwinIR_4x.pth", "pathIndex": 3 },
{ "name": "GFPGANv1.4.pth", "pathIndex": 4 }
]

@@ -1,5 +0,0 @@
[
{ "name": "ae.safetensors", "pathIndex": 1 },
{ "name": "sdxl_vae.safetensors", "pathIndex": 1 },
{ "name": "vae-ft-mse-840000-ema-pruned.ckpt", "pathIndex": 1 }
]

File diff suppressed because it is too large

@@ -1,12 +0,0 @@
{
"modelspec.hash_sha256": "0x2f3c5caac0469f474439cf84eb09f900bd8e5900f4ad9404c4e05cec12314df6",
"modelspec.date": "2024-08-01",
"modelspec.sai_model_spec": "1.0.1",
"modelspec.author": "Black Forest Labs",
"modelspec.architecture": "Flux.1-dev",
"modelspec.license": "FLUX.1 [dev] Non-Commercial License",
"modelspec.implementation": "https://github.com/black-forest-labs/flux",
"modelspec.thumbnail": "data:image/jpeg;base64,TRUNCATED",
"modelspec.title": "Flux.1-dev",
"modelspec.description": "A guidance distilled rectified flow model."
}

File diff suppressed because one or more lines are too long

@@ -1,11 +0,0 @@
{
"modelspec.architecture": "Flux.1-AE",
"modelspec.title": "Flux.1 Autoencoder",
"modelspec.author": "Black Forest Labs",
"modelspec.description": "The autoencoder for the Flux.1 model family",
"modelspec.implementation": "https://github.com/black-forest-labs/flux",
"modelspec.date": "2024-08-01",
"modelspec.license": "Apache License 2.0",
"modelspec.hash_sha256": "0xddec9c299f56c1178e6281a12167f2ebec9aa4de8fce81e234a687bb231d5b6d",
"modelspec.sai_model_spec": "1.0.1"
}

File diff suppressed because it is too large

@@ -1,768 +0,0 @@
{
"queue_running": [
[
6,
"97f9479d-4a5b-40d3-a71f-a75d3aadacdb",
{
"3": {
"inputs": {
"seed": 1042361530597518,
"steps": 20,
"cfg": 8.0,
"sampler_name": "euler",
"scheduler": "normal",
"denoise": 1.0,
"model": ["4", 0],
"positive": ["6", 0],
"negative": ["7", 0],
"latent_image": ["5", 0]
},
"class_type": "KSampler",
"_meta": { "title": "KSampler" }
},
"4": {
"inputs": {
"ckpt_name": "Anime/autismmixSDXL_autismmixConfetti.safetensors"
},
"class_type": "CheckpointLoaderSimple",
"_meta": { "title": "Load Checkpoint" }
},
"5": {
"inputs": { "width": 512, "height": 512, "batch_size": 1 },
"class_type": "EmptyLatentImage",
"_meta": { "title": "Empty Latent Image" }
},
"6": {
"inputs": {
"text": "beautiful scenery nature glass bottle landscape, , purple galaxy bottle,",
"clip": ["4", 1]
},
"class_type": "CLIPTextEncode",
"_meta": { "title": "CLIP Text Encode (Prompt)" }
},
"7": {
"inputs": { "text": "text, watermark", "clip": ["4", 1] },
"class_type": "CLIPTextEncode",
"_meta": { "title": "CLIP Text Encode (Prompt)" }
},
"8": {
"inputs": { "samples": ["3", 0], "vae": ["4", 2] },
"class_type": "VAEDecode",
"_meta": { "title": "VAE Decode" }
},
"9": {
"inputs": { "filename_prefix": "ComfyUI", "images": ["8", 0] },
"class_type": "SaveImage",
"_meta": { "title": "Save Image" }
}
},
{
"extra_pnginfo": {
"workflow": {
"last_node_id": 9,
"last_link_id": 9,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.27801513671875, 180.6060791015625],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [{ "name": "clip", "type": "CLIP", "link": 5 }],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": { "Node name for S&R": "CLIPTextEncode" },
"widgets_values": ["text, watermark"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.84503173828125, 164.31304931640625],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [{ "name": "clip", "type": "CLIP", "link": 3 }],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": { "Node name for S&R": "CLIPTextEncode" },
"widgets_values": [
"beautiful scenery nature glass bottle landscape, , purple galaxy bottle,"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": { "Node name for S&R": "EmptyLatentImage" },
"widgets_values": [512, 512, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{ "name": "model", "type": "MODEL", "link": 1 },
{ "name": "positive", "type": "CONDITIONING", "link": 4 },
{ "name": "negative", "type": "CONDITIONING", "link": 6 },
{ "name": "latent_image", "type": "LATENT", "link": 2 }
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": { "Node name for S&R": "KSampler" },
"widgets_values": [
1042361530597518,
"randomize",
20,
8,
"euler",
"normal",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{ "name": "samples", "type": "LATENT", "link": 7 },
{ "name": "vae", "type": "VAE", "link": 8 }
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": { "Node name for S&R": "VAEDecode" },
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [210, 270],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [{ "name": "images", "type": "IMAGE", "link": 9 }],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [26, 474],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [1],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8],
"slot_index": 2
}
],
"properties": { "Node name for S&R": "CheckpointLoaderSimple" },
"widgets_values": [
"Anime/autismmixSDXL_autismmixConfetti.safetensors"
]
}
],
"links": [
[1, 4, 0, 3, 0, "MODEL"],
[2, 5, 0, 3, 3, "LATENT"],
[3, 4, 1, 6, 0, "CLIP"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[5, 4, 1, 7, 0, "CLIP"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[9, 8, 0, 9, 0, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.9090909090909091,
"offset": [114.45999999999988, -114.63999999999956]
}
},
"version": 0.4
}
},
"client_id": "eda4bf07c812424dbb9e964c5e000ade"
},
["9"]
]
],
"queue_pending": [
[
7,
"09e094b9-51c2-4355-a994-9c7272310d76",
{
"3": {
"inputs": {
"seed": 344828775410837,
"steps": 20,
"cfg": 8.0,
"sampler_name": "euler",
"scheduler": "normal",
"denoise": 1.0,
"model": ["4", 0],
"positive": ["6", 0],
"negative": ["7", 0],
"latent_image": ["5", 0]
},
"class_type": "KSampler",
"_meta": { "title": "KSampler" }
},
"4": {
"inputs": {
"ckpt_name": "Anime/autismmixSDXL_autismmixConfetti.safetensors"
},
"class_type": "CheckpointLoaderSimple",
"_meta": { "title": "Load Checkpoint" }
},
"5": {
"inputs": { "width": 512, "height": 512, "batch_size": 1 },
"class_type": "EmptyLatentImage",
"_meta": { "title": "Empty Latent Image" }
},
"6": {
"inputs": {
"text": "beautiful scenery nature glass bottle landscape, , purple galaxy bottle,",
"clip": ["4", 1]
},
"class_type": "CLIPTextEncode",
"_meta": { "title": "CLIP Text Encode (Prompt)" }
},
"7": {
"inputs": { "text": "text, watermark", "clip": ["4", 1] },
"class_type": "CLIPTextEncode",
"_meta": { "title": "CLIP Text Encode (Prompt)" }
},
"8": {
"inputs": { "samples": ["3", 0], "vae": ["4", 2] },
"class_type": "VAEDecode",
"_meta": { "title": "VAE Decode" }
},
"9": {
"inputs": { "filename_prefix": "ComfyUI", "images": ["8", 0] },
"class_type": "SaveImage",
"_meta": { "title": "Save Image" }
}
},
{
"extra_pnginfo": {
"workflow": {
"last_node_id": 9,
"last_link_id": 9,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.27801513671875, 180.6060791015625],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [{ "name": "clip", "type": "CLIP", "link": 5 }],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": { "Node name for S&R": "CLIPTextEncode" },
"widgets_values": ["text, watermark"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.84503173828125, 164.31304931640625],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [{ "name": "clip", "type": "CLIP", "link": 3 }],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": { "Node name for S&R": "CLIPTextEncode" },
"widgets_values": [
"beautiful scenery nature glass bottle landscape, , purple galaxy bottle,"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": { "Node name for S&R": "EmptyLatentImage" },
"widgets_values": [512, 512, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{ "name": "model", "type": "MODEL", "link": 1 },
{ "name": "positive", "type": "CONDITIONING", "link": 4 },
{ "name": "negative", "type": "CONDITIONING", "link": 6 },
{ "name": "latent_image", "type": "LATENT", "link": 2 }
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": { "Node name for S&R": "KSampler" },
"widgets_values": [
344828775410837,
"randomize",
20,
8,
"euler",
"normal",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{ "name": "samples", "type": "LATENT", "link": 7 },
{ "name": "vae", "type": "VAE", "link": 8 }
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": { "Node name for S&R": "VAEDecode" },
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [210, 270],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [{ "name": "images", "type": "IMAGE", "link": 9 }],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [26, 474],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [1],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8],
"slot_index": 2
}
],
"properties": { "Node name for S&R": "CheckpointLoaderSimple" },
"widgets_values": [
"Anime/autismmixSDXL_autismmixConfetti.safetensors"
]
}
],
"links": [
[1, 4, 0, 3, 0, "MODEL"],
[2, 5, 0, 3, 3, "LATENT"],
[3, 4, 1, 6, 0, "CLIP"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[5, 4, 1, 7, 0, "CLIP"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[9, 8, 0, 9, 0, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.9090909090909091,
"offset": [114.45999999999988, -114.63999999999956]
}
},
"version": 0.4
}
},
"client_id": "eda4bf07c812424dbb9e964c5e000ade"
},
["9"]
],
[
8,
"6dbfc8a9-f672-47f6-9bcd-f43e571e280a",
{
"3": {
"inputs": {
"seed": 255741014898185,
"steps": 20,
"cfg": 8.0,
"sampler_name": "euler",
"scheduler": "normal",
"denoise": 1.0,
"model": ["4", 0],
"positive": ["6", 0],
"negative": ["7", 0],
"latent_image": ["5", 0]
},
"class_type": "KSampler",
"_meta": { "title": "KSampler" }
},
"4": {
"inputs": {
"ckpt_name": "Anime/autismmixSDXL_autismmixConfetti.safetensors"
},
"class_type": "CheckpointLoaderSimple",
"_meta": { "title": "Load Checkpoint" }
},
"5": {
"inputs": { "width": 512, "height": 512, "batch_size": 1 },
"class_type": "EmptyLatentImage",
"_meta": { "title": "Empty Latent Image" }
},
"6": {
"inputs": {
"text": "beautiful scenery nature glass bottle landscape, , purple galaxy bottle,",
"clip": ["4", 1]
},
"class_type": "CLIPTextEncode",
"_meta": { "title": "CLIP Text Encode (Prompt)" }
},
"7": {
"inputs": { "text": "text, watermark", "clip": ["4", 1] },
"class_type": "CLIPTextEncode",
"_meta": { "title": "CLIP Text Encode (Prompt)" }
},
"8": {
"inputs": { "samples": ["3", 0], "vae": ["4", 2] },
"class_type": "VAEDecode",
"_meta": { "title": "VAE Decode" }
},
"9": {
"inputs": { "filename_prefix": "ComfyUI", "images": ["8", 0] },
"class_type": "SaveImage",
"_meta": { "title": "Save Image" }
}
},
{
"extra_pnginfo": {
"workflow": {
"last_node_id": 9,
"last_link_id": 9,
"nodes": [
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [413, 389],
"size": [425.27801513671875, 180.6060791015625],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [{ "name": "clip", "type": "CLIP", "link": 5 }],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [6],
"slot_index": 0
}
],
"properties": { "Node name for S&R": "CLIPTextEncode" },
"widgets_values": ["text, watermark"]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [415, 186],
"size": [422.84503173828125, 164.31304931640625],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [{ "name": "clip", "type": "CLIP", "link": 3 }],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"links": [4],
"slot_index": 0
}
],
"properties": { "Node name for S&R": "CLIPTextEncode" },
"widgets_values": [
"beautiful scenery nature glass bottle landscape, , purple galaxy bottle,"
]
},
{
"id": 5,
"type": "EmptyLatentImage",
"pos": [473, 609],
"size": [315, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2],
"slot_index": 0
}
],
"properties": { "Node name for S&R": "EmptyLatentImage" },
"widgets_values": [512, 512, 1]
},
{
"id": 3,
"type": "KSampler",
"pos": [863, 186],
"size": [315, 262],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [
{ "name": "model", "type": "MODEL", "link": 1 },
{ "name": "positive", "type": "CONDITIONING", "link": 4 },
{ "name": "negative", "type": "CONDITIONING", "link": 6 },
{ "name": "latent_image", "type": "LATENT", "link": 2 }
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [7],
"slot_index": 0
}
],
"properties": { "Node name for S&R": "KSampler" },
"widgets_values": [
255741014898185,
"randomize",
20,
8,
"euler",
"normal",
1
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [1209, 188],
"size": [210, 46],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [
{ "name": "samples", "type": "LATENT", "link": 7 },
{ "name": "vae", "type": "VAE", "link": 8 }
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [9],
"slot_index": 0
}
],
"properties": { "Node name for S&R": "VAEDecode" },
"widgets_values": []
},
{
"id": 9,
"type": "SaveImage",
"pos": [1451, 189],
"size": [210, 270],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [{ "name": "images", "type": "IMAGE", "link": 9 }],
"outputs": [],
"properties": {},
"widgets_values": ["ComfyUI"]
},
{
"id": 4,
"type": "CheckpointLoaderSimple",
"pos": [26, 474],
"size": [315, 98],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [1],
"slot_index": 0
},
{
"name": "CLIP",
"type": "CLIP",
"links": [3, 5],
"slot_index": 1
},
{
"name": "VAE",
"type": "VAE",
"links": [8],
"slot_index": 2
}
],
"properties": { "Node name for S&R": "CheckpointLoaderSimple" },
"widgets_values": [
"Anime/autismmixSDXL_autismmixConfetti.safetensors"
]
}
],
"links": [
[1, 4, 0, 3, 0, "MODEL"],
[2, 5, 0, 3, 3, "LATENT"],
[3, 4, 1, 6, 0, "CLIP"],
[4, 6, 0, 3, 1, "CONDITIONING"],
[5, 4, 1, 7, 0, "CLIP"],
[6, 7, 0, 3, 2, "CONDITIONING"],
[7, 3, 0, 8, 0, "LATENT"],
[8, 4, 2, 8, 1, "VAE"],
[9, 8, 0, 9, 0, "IMAGE"]
],
"groups": [],
"config": {},
"extra": {
"ds": {
"scale": 0.9090909090909091,
"offset": [114.45999999999988, -114.63999999999956]
}
},
"version": 0.4
}
},
"client_id": "eda4bf07c812424dbb9e964c5e000ade"
},
["9"]
]
]
}

@@ -186,6 +186,33 @@ class ComfyUiApi {
    return jsonDecode(response.body);
  }

  /// Gets a list of possible samplers from the object info
  Future<List<String>> getKSamplers() async {
    final objectInfo = await getObjectInfo();
    if (objectInfo.containsKey('KSampler') &&
        objectInfo['KSampler']['input']['required']
            .containsKey('sampler_name')) {
      return List<String>.from(
          objectInfo['KSampler']['input']['required']['sampler_name'][0]);
    }
    throw ComfyUiApiException(
        statusCode: 500,
        message: 'KSampler information not found in object info.');
  }

  /// Gets a list of possible schedulers from the object info
  Future<List<String>> getSchedulers() async {
    final objectInfo = await getObjectInfo();
    if (objectInfo.containsKey('KSampler') &&
        objectInfo['KSampler']['input']['required'].containsKey('scheduler')) {
      return List<String>.from(
          objectInfo['KSampler']['input']['required']['scheduler'][0]);
    }
    throw ComfyUiApiException(
        statusCode: 500,
        message: 'Scheduler information not found in object info.');
  }

  /// Submits a prompt (workflow) to generate an image
  Future<SubmitPromptResponse> submitPrompt(Map<String, dynamic> prompt) async {
    final response = await _httpClient.post(
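
A quick usage sketch for the two new methods (not part of this commit; it assumes an already-constructed ComfyUiApi instance, since the constructor and HTTP setup are outside this hunk):

// Hypothetical usage sketch: load sampler and scheduler choices from the
// ComfyUI /object_info endpoint via the new API methods. `api` is assumed
// to be an already-constructed ComfyUiApi instance.
Future<void> printSamplingOptions(ComfyUiApi api) async {
  try {
    final samplers = await api.getKSamplers(); // e.g. ["euler", "dpmpp_2m", ...]
    final schedulers = await api.getSchedulers(); // e.g. ["normal", "karras", ...]
    print('Found ${samplers.length} samplers: $samplers');
    print('Found ${schedulers.length} schedulers: $schedulers');
  } on ComfyUiApiException {
    // Thrown by both methods when KSampler info is missing from object info.
    print('KSampler information not available from this ComfyUI server.');
  }
}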

@@ -0,0 +1,162 @@
{
"KSampler": {
"input": {
"required": {
"model": [
"MODEL",
{
"tooltip": "The model used for denoising the input latent."
}
],
"seed": [
"INT",
{
"default": 0,
"min": 0,
"max": 18446744073709551615,
"control_after_generate": true,
"tooltip": "The random seed used for creating the noise."
}
],
"steps": [
"INT",
{
"default": 20,
"min": 1,
"max": 10000,
"tooltip": "The number of steps used in the denoising process."
}
],
"cfg": [
"FLOAT",
{
"default": 8.0,
"min": 0.0,
"max": 100.0,
"step": 0.1,
"round": 0.01,
"tooltip": "The Classifier-Free Guidance scale balances creativity and adherence to the prompt. Higher values result in images more closely matching the prompt however too high values will negatively impact quality."
}
],
"sampler_name": [
[
"euler",
"euler_cfg_pp",
"euler_ancestral",
"euler_ancestral_cfg_pp",
"heun",
"heunpp2",
"dpm_2",
"dpm_2_ancestral",
"lms",
"dpm_fast",
"dpm_adaptive",
"dpmpp_2s_ancestral",
"dpmpp_2s_ancestral_cfg_pp",
"dpmpp_sde",
"dpmpp_sde_gpu",
"dpmpp_2m",
"dpmpp_2m_cfg_pp",
"dpmpp_2m_sde",
"dpmpp_2m_sde_gpu",
"dpmpp_3m_sde",
"dpmpp_3m_sde_gpu",
"ddpm",
"lcm",
"ipndm",
"ipndm_v",
"deis",
"res_multistep",
"res_multistep_cfg_pp",
"res_multistep_ancestral",
"res_multistep_ancestral_cfg_pp",
"gradient_estimation",
"er_sde",
"ddim",
"uni_pc",
"uni_pc_bh2"
],
{
"tooltip": "The algorithm used when sampling, this can affect the quality, speed, and style of the generated output."
}
],
"scheduler": [
[
"normal",
"karras",
"exponential",
"sgm_uniform",
"simple",
"ddim_uniform",
"beta",
"linear_quadratic",
"kl_optimal"
],
{
"tooltip": "The scheduler controls how noise is gradually removed to form the image."
}
],
"positive": [
"CONDITIONING",
{
"tooltip": "The conditioning describing the attributes you want to include in the image."
}
],
"negative": [
"CONDITIONING",
{
"tooltip": "The conditioning describing the attributes you want to exclude from the image."
}
],
"latent_image": [
"LATENT",
{
"tooltip": "The latent image to denoise."
}
],
"denoise": [
"FLOAT",
{
"default": 1.0,
"min": 0.0,
"max": 1.0,
"step": 0.01,
"tooltip": "The amount of denoising applied, lower values will maintain the structure of the initial image allowing for image to image sampling."
}
]
}
},
"input_order": {
"required": [
"model",
"seed",
"steps",
"cfg",
"sampler_name",
"scheduler",
"positive",
"negative",
"latent_image",
"denoise"
]
},
"output": [
"LATENT"
],
"output_is_list": [
false
],
"output_name": [
"LATENT"
],
"name": "KSampler",
"display_name": "KSampler",
"description": "Uses the provided model, positive and negative conditioning to denoise the latent image.",
"python_module": "nodes",
"category": "sampling",
"output_node": false,
"output_tooltips": [
"The denoised latent."
]
}
}
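
The new JSON file above appears to be a sample of ComfyUI's /object_info response for the KSampler node: for list-valued inputs such as "sampler_name" and "scheduler", element [0] holds the allowed values, which is exactly what the new getKSamplers() and getSchedulers() methods read. A minimal standalone sketch of that lookup, assuming the file were saved at a hypothetical path test/fixtures/ksampler_object_info.json:

import 'dart:convert';
import 'dart:io';

// Standalone sketch of the same lookup the new API methods perform on the
// decoded object info map. The fixture path below is an assumption, not a
// path taken from this commit.
void main() {
  final raw =
      File('test/fixtures/ksampler_object_info.json').readAsStringSync();
  final objectInfo = jsonDecode(raw) as Map<String, dynamic>;

  final requiredInputs = objectInfo['KSampler']['input']['required'];
  final samplers = List<String>.from(requiredInputs['sampler_name'][0]);
  final schedulers = List<String>.from(requiredInputs['scheduler'][0]);

  print(samplers.length); // 35 sampler names, starting with "euler"
  print(schedulers.length); // 9 scheduler names, starting with "normal"
}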