| { |
| "id": "908d0bfb-e192-4627-9b57-147496e6e2dd", |
| "revision": 0, |
| "last_node_id": 51, |
| "last_link_id": 70, |
| "nodes": [ |
| { |
| "id": 9, |
| "type": "SaveImage", |
| "pos": [ |
| 380, |
| 110 |
| ], |
| "size": [ |
| 640, |
| 660 |
| ], |
| "flags": {}, |
| "order": 9, |
| "mode": 0, |
| "inputs": [ |
| { |
| "name": "images", |
| "type": "IMAGE", |
| "link": 9 |
| } |
| ], |
| "outputs": [], |
| "properties": { |
| "cnr_id": "comfy-core", |
| "ver": "0.3.40" |
| }, |
| "widgets_values": [ |
| "flux_krea/flux_krea" |
| ] |
| }, |
| { |
| "id": 40, |
| "type": "DualCLIPLoader", |
| "pos": [ |
| -320, |
| 290 |
| ], |
| "size": [ |
| 270, |
| 130 |
| ], |
| "flags": {}, |
| "order": 0, |
| "mode": 0, |
| "inputs": [], |
| "outputs": [ |
| { |
| "name": "CLIP", |
| "type": "CLIP", |
| "links": [ |
| 64 |
| ] |
| } |
| ], |
| "properties": { |
| "Node name for S&R": "DualCLIPLoader", |
| "cnr_id": "comfy-core", |
| "ver": "0.3.40", |
| "models": [ |
| { |
| "name": "clip_l.safetensors", |
| "url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors", |
| "directory": "text_encoders" |
| }, |
| { |
| "name": "t5xxl_fp16.safetensors", |
| "url": "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors", |
| "directory": "text_encoders" |
| } |
| ] |
| }, |
| "widgets_values": [ |
| "clip_l.safetensors", |
| "t5xxl_fp16.safetensors", |
| "flux", |
| "default" |
| ] |
| }, |
| { |
| "id": 39, |
| "type": "VAELoader", |
| "pos": [ |
| -320, |
| 470 |
| ], |
| "size": [ |
| 270, |
| 58 |
| ], |
| "flags": {}, |
| "order": 1, |
| "mode": 0, |
| "inputs": [], |
| "outputs": [ |
| { |
| "name": "VAE", |
| "type": "VAE", |
| "links": [ |
| 58 |
| ] |
| } |
| ], |
| "properties": { |
| "Node name for S&R": "VAELoader", |
| "cnr_id": "comfy-core", |
| "ver": "0.3.40", |
| "models": [ |
| { |
| "name": "ae.safetensors", |
| "url": "https://huggingface.co/Comfy-Org/Lumina_Image_2.0_Repackaged/resolve/main/split_files/vae/ae.safetensors", |
| "directory": "vae" |
| } |
| ] |
| }, |
| "widgets_values": [ |
| "ae.safetensors" |
| ] |
| }, |
| { |
| "id": 42, |
| "type": "ConditioningZeroOut", |
| "pos": [ |
| -10, |
| 460 |
| ], |
| "size": [ |
| 200, |
| 30 |
| ], |
| "flags": { |
| "collapsed": false |
| }, |
| "order": 6, |
| "mode": 0, |
| "inputs": [ |
| { |
| "name": "conditioning", |
| "type": "CONDITIONING", |
| "link": 66 |
| } |
| ], |
| "outputs": [ |
| { |
| "name": "CONDITIONING", |
| "type": "CONDITIONING", |
| "links": [ |
| 63 |
| ] |
| } |
| ], |
| "properties": { |
| "Node name for S&R": "ConditioningZeroOut", |
| "cnr_id": "comfy-core", |
| "ver": "0.3.40" |
| }, |
| "widgets_values": [] |
| }, |
| { |
| "id": 8, |
| "type": "VAEDecode", |
| "pos": [ |
| 230, |
| 470 |
| ], |
| "size": [ |
| 210, |
| 46 |
| ], |
| "flags": { |
| "collapsed": true |
| }, |
| "order": 8, |
| "mode": 0, |
| "inputs": [ |
| { |
| "name": "samples", |
| "type": "LATENT", |
| "link": 52 |
| }, |
| { |
| "name": "vae", |
| "type": "VAE", |
| "link": 58 |
| } |
| ], |
| "outputs": [ |
| { |
| "name": "IMAGE", |
| "type": "IMAGE", |
| "slot_index": 0, |
| "links": [ |
| 9 |
| ] |
| } |
| ], |
| "properties": { |
| "Node name for S&R": "VAEDecode", |
| "cnr_id": "comfy-core", |
| "ver": "0.3.40" |
| }, |
| "widgets_values": [] |
| }, |
| { |
| "id": 27, |
| "type": "EmptySD3LatentImage", |
| "pos": [ |
| -320, |
| 630 |
| ], |
| "size": [ |
| 270, |
| 120 |
| ], |
| "flags": {}, |
| "order": 2, |
| "mode": 0, |
| "inputs": [], |
| "outputs": [ |
| { |
| "name": "LATENT", |
| "type": "LATENT", |
| "slot_index": 0, |
| "links": [ |
| 51 |
| ] |
| } |
| ], |
| "properties": { |
| "Node name for S&R": "EmptySD3LatentImage", |
| "cnr_id": "comfy-core", |
| "ver": "0.3.40" |
| }, |
| "widgets_values": [ |
| 1024, |
| 1024, |
| 1 |
| ] |
| }, |
| { |
| "id": 45, |
| "type": "CLIPTextEncode", |
| "pos": [ |
| 10, |
| 170 |
| ], |
| "size": [ |
| 330, |
| 210 |
| ], |
| "flags": {}, |
| "order": 5, |
| "mode": 0, |
| "inputs": [ |
| { |
| "name": "clip", |
| "type": "CLIP", |
| "link": 64 |
| } |
| ], |
| "outputs": [ |
| { |
| "name": "CONDITIONING", |
| "type": "CONDITIONING", |
| "links": [ |
| 65, |
| 66 |
| ] |
| } |
| ], |
| "properties": { |
| "Node name for S&R": "CLIPTextEncode", |
| "cnr_id": "comfy-core", |
| "ver": "0.3.47" |
| }, |
| "widgets_values": [ |
| "Highly realistic portrait of a Nordic woman with blonde hair and blue eyes, very few freckles on her face, gaze sharp and intellectual. The lighting should reflect the unique coolness of Northern Europe. Outfit is minimalist and modern, background is blurred in cool tones. Needs to perfectly capture the characteristics of a Scandinavian woman. solo, Centered composition\n" |
| ] |
| }, |
| { |
| "id": 31, |
| "type": "KSampler", |
| "pos": [ |
| 10, |
| 550 |
| ], |
| "size": [ |
| 315, |
| 262 |
| ], |
| "flags": {}, |
| "order": 7, |
| "mode": 0, |
| "inputs": [ |
| { |
| "name": "model", |
| "type": "MODEL", |
| "link": 61 |
| }, |
| { |
| "name": "positive", |
| "type": "CONDITIONING", |
| "link": 65 |
| }, |
| { |
| "name": "negative", |
| "type": "CONDITIONING", |
| "link": 63 |
| }, |
| { |
| "name": "latent_image", |
| "type": "LATENT", |
| "link": 51 |
| } |
| ], |
| "outputs": [ |
| { |
| "name": "LATENT", |
| "type": "LATENT", |
| "slot_index": 0, |
| "links": [ |
| 52 |
| ] |
| } |
| ], |
| "properties": { |
| "Node name for S&R": "KSampler", |
| "cnr_id": "comfy-core", |
| "ver": "0.3.40" |
| }, |
| "widgets_values": [ |
| 277251746703202, |
| "randomize", |
| 20, |
| 1, |
| "euler", |
| "simple", |
| 1 |
| ] |
| }, |
| { |
| "id": 38, |
| "type": "UNETLoader", |
| "pos": [ |
| -320, |
| 150 |
| ], |
| "size": [ |
| 270, |
| 82 |
| ], |
| "flags": {}, |
| "order": 3, |
| "mode": 0, |
| "inputs": [], |
| "outputs": [ |
| { |
| "name": "MODEL", |
| "type": "MODEL", |
| "links": [ |
| 61 |
| ] |
| } |
| ], |
| "properties": { |
| "Node name for S&R": "UNETLoader", |
| "cnr_id": "comfy-core", |
| "ver": "0.3.40", |
| "models": [ |
| { |
| "name": "flux1-krea-dev_fp8_scaled.safetensors", |
| "url": "https://huggingface.co/Comfy-Org/FLUX.1-Krea-dev_ComfyUI/resolve/main/split_files/diffusion_models/flux1-krea-dev_fp8_scaled.safetensors", |
| "directory": "diffusion_models" |
| } |
| ] |
| }, |
| "widgets_values": [ |
| "flux1-krea-dev_fp8_scaled.safetensors", |
| "default" |
| ] |
| }, |
| { |
| "id": 43, |
| "type": "MarkdownNote", |
| "pos": [ |
| -870, |
| 110 |
| ], |
| "size": [ |
| 520, |
| 390 |
| ], |
| "flags": {}, |
| "order": 4, |
| "mode": 0, |
| "inputs": [], |
| "outputs": [], |
| "title": "Model links", |
| "properties": {}, |
| "widgets_values": [ |
| "## Model links\n\n**Diffusion Model**\n\n- [flux1-krea-dev_fp8_scaled.safetensors](https://huggingface.co/Comfy-Org/FLUX.1-Krea-dev_ComfyUI/resolve/main/split_files/diffusion_models/flux1-krea-dev_fp8_scaled.safetensors)\n\nIf you need the original weights, head to [black-forest-labs/FLUX.1-Krea-dev](https://huggingface.co/black-forest-labs/FLUX.1-Krea-dev/), accept the agreement in the repo, then click the link below to download the models:\n\n- [flux1-krea-dev.safetensors](https://huggingface.co/black-forest-labs/FLUX.1-Krea-dev/resolve/main/flux1-krea-dev.safetensors)\n\n**Text Encoder**\n\n- [clip_l.safetensors](https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors)\n\n- [t5xxl_fp16.safetensors](https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors) or [t5xxl_fp8_e4m3fn_scaled.safetensors](https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp8_e4m3fn_scaled.safetensors)\n\n**VAE**\n\n- [ae.safetensors](https://huggingface.co/Comfy-Org/Lumina_Image_2.0_Repackaged/resolve/main/split_files/vae/ae.safetensors)\n\n\n```\nComfyUI/\n├── models/\n│ ├── diffusion_models/\n│ │ └── flux1-krea-dev_fp8_scaled.safetensors\n│ ├── text_encoders/\n│ │ ├── clip_l.safetensors\n│ │ └── t5xxl_fp16.safetensors # or t5xxl_fp8_e4m3fn_scaled.safetensors\n│ └── vae/\n│ └── ae.safetensors\n```\n" |
| ], |
| "color": "#432", |
| "bgcolor": "#653" |
| } |
| ], |
| "links": [ |
| [ |
| 9, |
| 8, |
| 0, |
| 9, |
| 0, |
| "IMAGE" |
| ], |
| [ |
| 51, |
| 27, |
| 0, |
| 31, |
| 3, |
| "LATENT" |
| ], |
| [ |
| 52, |
| 31, |
| 0, |
| 8, |
| 0, |
| "LATENT" |
| ], |
| [ |
| 58, |
| 39, |
| 0, |
| 8, |
| 1, |
| "VAE" |
| ], |
| [ |
| 61, |
| 38, |
| 0, |
| 31, |
| 0, |
| "MODEL" |
| ], |
| [ |
| 63, |
| 42, |
| 0, |
| 31, |
| 2, |
| "CONDITIONING" |
| ], |
| [ |
| 64, |
| 40, |
| 0, |
| 45, |
| 0, |
| "CLIP" |
| ], |
| [ |
| 65, |
| 45, |
| 0, |
| 31, |
| 1, |
| "CONDITIONING" |
| ], |
| [ |
| 66, |
| 45, |
| 0, |
| 42, |
| 0, |
| "CONDITIONING" |
| ] |
| ], |
| "groups": [ |
| { |
| "id": 1, |
| "title": "Step 1 - Load Models Here", |
| "bounding": [ |
| -330, |
| 80, |
| 300, |
| 460 |
| ], |
| "color": "#3f789e", |
| "font_size": 24, |
| "flags": {} |
| }, |
| { |
| "id": 2, |
| "title": "Step 2 - Image Size", |
| "bounding": [ |
| -330, |
| 560, |
| 300, |
| 200 |
| ], |
| "color": "#3f789e", |
| "font_size": 24, |
| "flags": {} |
| }, |
| { |
| "id": 3, |
| "title": "Step 3 - Prompt", |
| "bounding": [ |
| -10, |
| 80, |
| 360, |
| 333.6000061035156 |
| ], |
| "color": "#3f789e", |
| "font_size": 24, |
| "flags": {} |
| } |
| ], |
| "config": {}, |
| "extra": { |
| "ds": { |
| "scale": 0.6534031413612565, |
| "offset": [ |
| 1016.3535547042429, |
| 202.5185688696577 |
| ] |
| }, |
| "frontendVersion": "1.25.3", |
| "VHS_latentpreview": false, |
| "VHS_latentpreviewrate": 0, |
| "VHS_MetadataImage": true, |
| "VHS_KeepIntermediate": true |
| }, |
| "version": 0.4 |
| } |