# Hugging Face Space (status: Running) — app file, 1,889 bytes.
import gradio as gr
import torch
from diffusers import StableDiffusionPipeline
import random
import numpy as np
# 1. Load a CPU-optimized model
# 'segmind/tiny-sd' is much smaller and faster on CPUs than SDXL
model_id = "segmind/tiny-sd"
# Use float32 because CPU doesn't support float16 well
pipe = StableDiffusionPipeline.from_pretrained(
model_id,
torch_dtype=torch.float32
)
# Explicitly pin the pipeline to the CPU device (no GPU on this Space).
pipe = pipe.to("cpu")
# Silence the per-step progress bar so the console/log stays clean
# (this does not speed up inference itself).
pipe.set_progress_bar_config(disable=True)
# Upper bound for user-supplied / randomized seeds: 2**31 - 1.
MAX_SEED = np.iinfo(np.int32).max
def infer(prompt, seed, randomize_seed, width, height):
    """Generate one image from ``prompt`` on the CPU pipeline.

    Parameters
    ----------
    prompt : str
        Text prompt passed to Stable Diffusion.
    seed : int
        RNG seed; ignored when ``randomize_seed`` is True.
    randomize_seed : bool
        When True, a fresh seed in [0, MAX_SEED] is drawn instead.
    width, height : int
        Output resolution in pixels (the UI sliders step by 32).

    Returns
    -------
    tuple
        ``(image, seed)`` — the generated PIL image and the seed that was
        actually used, so the UI can display it after randomization.
    """
    if randomize_seed:
        seed = random.randint(0, MAX_SEED)
    # Gradio number components can hand back floats; the generator and
    # the pipeline's size arguments require ints.
    seed, width, height = int(seed), int(width), int(height)
    generator = torch.Generator("cpu").manual_seed(seed)
    # Very low step count (15) keeps CPU latency tolerable.
    image = pipe(
        prompt=prompt,
        generator=generator,
        num_inference_steps=15,
        guidance_scale=7.0,
        width=width,
        height=height,
    ).images[0]
    return image, seed
# Simple UI — nesting reconstructed to the standard Gradio template layout
# (the pasted source had all indentation stripped).
with gr.Blocks() as demo:
    gr.Markdown("# CodeIgnite CPU Image Engine")
    with gr.Column():
        prompt = gr.Textbox(label="Prompt", placeholder="A simple cat drawing")
        run_button = gr.Button("Generate (CPU Mode)")
        result = gr.Image(label="Result")
        # Advanced knobs are tucked away; defaults favor CPU speed
        # (384x384, fixed 15 steps inside infer).
        with gr.Accordion("Settings", open=False):
            seed = gr.Slider(label="Seed", minimum=0, maximum=MAX_SEED, step=1, value=0)
            randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
            width = gr.Slider(label="Width", minimum=256, maximum=512, step=32, value=384)
            height = gr.Slider(label="Height", minimum=256, maximum=512, step=32, value=384)
    # Seed is both an input and an output so the randomized value is
    # written back into the slider after each run.
    run_button.click(
        fn=infer,
        inputs=[prompt, seed, randomize_seed, width, height],
        outputs=[result, seed],
        api_name="predict",
    )

if __name__ == "__main__":
    demo.launch()