import gradio as gr
import torch
import spaces
from diffusers import (
    StableDiffusionXLPipeline,
    EulerAncestralDiscreteScheduler,
    EulerDiscreteScheduler,
    DPMSolverMultistepScheduler,
    UniPCMultistepScheduler
)
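# Single-file SDXL checkpoint hosted on the Hub; loaded on CPU at startup
# (ZeroGPU Spaces only get a GPU inside the @spaces.GPU-decorated function).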
repo_id = "ChenkinNoob/ChenkinNoob-XL-V0.1"
ckpt_filename = "ChenkinNoob-XL-V0.1.safetensors"

print(f"Loading model file: {ckpt_filename}...")
pipe = StableDiffusionXLPipeline.from_single_file(
    f"https://huggingface.co/{repo_id}/blob/main/{ckpt_filename}",
    torch_dtype=torch.float16,
    use_safetensors=True
)
print("Model loaded (CPU ready)")
DEFAULT_PREFIX = "masterpiece, best quality, newest, absurdres, highres, safe, "
DEFAULT_NEGATIVE = "nsfw, worst quality, old, early, low quality, lowres, signature, username, logo, bad hands, mutated hands, mammal, anthro, furry, ambiguous form, feral, semi-anthro"
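# Map the sampler name selected in the UI to the corresponding diffusers scheduler,
# reusing the current scheduler's config so the pipeline's settings carry over.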
def set_scheduler(name):
    config = pipe.scheduler.config
    if name == "Euler a (Recommended)":
        pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(config)
    elif name == "Euler":
        pipe.scheduler = EulerDiscreteScheduler.from_config(config)
    elif name == "DPM++ 2M Karras":
        pipe.scheduler = DPMSolverMultistepScheduler.from_config(config, use_karras_sigmas=True)
    elif name == "DPM++ SDE Karras":
        pipe.scheduler = DPMSolverMultistepScheduler.from_config(config, use_karras_sigmas=True, algorithm_type="sde-dpmsolver++")
    elif name == "UniPC":
        pipe.scheduler = UniPCMultistepScheduler.from_config(config)
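# ZeroGPU attaches a GPU only while a @spaces.GPU-decorated function is running,
# so the pipeline is moved to CUDA inside the function rather than at import time.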
@spaces.GPU
def generate_image(prompt, negative_prompt, steps, cfg_scale, width, height, sampler_name, seed):
    pipe.to("cuda")
    set_scheduler(sampler_name)

    final_prompt = DEFAULT_PREFIX + prompt

    if seed == -1:
        generator = None
    else:
        generator = torch.Generator("cuda").manual_seed(int(seed))

    image = pipe(
        prompt=final_prompt,
        negative_prompt=negative_prompt,
        num_inference_steps=int(steps),
        guidance_scale=cfg_scale,
        width=int(width),
        height=int(height),
        generator=generator
    ).images[0]

    return image
css = """
#col-container {max-width: 800px; margin-left: auto; margin-right: auto;}
"""
with gr.Blocks(css=css) as demo:
    with gr.Column(elem_id="col-container"):
        gr.Markdown("# ChenkinNoob-XL-V0.1 Online Demo (ZeroGPU)")
        gr.Markdown("Fine-tuned from NoobAI-XL-1.1, with switchable samplers.")

        with gr.Row():
            with gr.Column():
                prompt_input = gr.Textbox(
                    label="Prompt",
                    placeholder="e.g. <1girl>, solo, white hair, blue eyes...",
                    lines=3
                )
                negative_input = gr.Textbox(
                    label="Negative Prompt",
                    value=DEFAULT_NEGATIVE,
                    lines=2
                )

                with gr.Accordion("Parameters", open=True):
                    sampler_dropdown = gr.Dropdown(
                        label="Sampling Method",
                        choices=["Euler a (Recommended)", "Euler", "DPM++ 2M Karras", "DPM++ SDE Karras", "UniPC"],
                        value="Euler a (Recommended)",
                        interactive=True
                    )
                    with gr.Row():
                        steps = gr.Slider(label="Steps", minimum=10, maximum=50, value=28, step=1)
                        cfg = gr.Slider(label="CFG Scale", minimum=1, maximum=10, value=5.5, step=0.1)
                    with gr.Row():
                        width = gr.Slider(label="Width", minimum=512, maximum=2048, value=1024, step=64)
                        height = gr.Slider(label="Height", minimum=512, maximum=2048, value=1024, step=64)
                    seed = gr.Number(label="Seed (-1 for random)", value=-1)

                run_btn = gr.Button("Generate", variant="primary")

            with gr.Column():
                result_image = gr.Image(label="Result")

    run_btn.click(
        fn=generate_image,
        inputs=[prompt_input, negative_input, steps, cfg, width, height, sampler_dropdown, seed],
        outputs=[result_image]
    )

demo.queue().launch()