import gradio as gr
import os
import tempfile
import shutil
import re
import json
import datetime
import subprocess
from huggingface_hub import HfApi, hf_hub_download


# --- Conversion Function: Safetensors (UNet) → GGUF ---
def convert_unet_to_gguf(safetensors_path, output_dir, progress=gr.Progress()):
    """
    Converts a UNet safetensors file to GGUF using gguf-connector's CLI (t2).
    """
    progress(0.1, desc="Starting UNet to GGUF conversion...")
    try:
        # Ensure gguf-connector is available
        import gguf_connector  # noqa: F401

        # Prepare working directory
        work_dir = tempfile.mkdtemp()
        input_name = os.path.basename(safetensors_path)
        input_path = os.path.join(work_dir, input_name)
        shutil.copy(safetensors_path, input_path)

        # Expected GGUF output name: same basename + .gguf
        base_name = os.path.splitext(input_name)[0]
        gguf_output_path = os.path.join(work_dir, f"{base_name}.gguf")

        progress(0.3, desc="Running gguf-connector (t2: safetensors → GGUF)...")

        # Run `ggc t2` inside the work directory so it picks up the copied file
        original_cwd = os.getcwd()
        os.chdir(work_dir)
        try:
            result = subprocess.run(
                ["ggc", "t2"],
                input="1\n",  # auto-select the first listed file
                text=True,
                capture_output=True,
                timeout=600
            )
            if result.returncode != 0:
                raise RuntimeError(f"ggc t2 failed: {result.stderr}")
        finally:
            os.chdir(original_cwd)

        # Check if the expected GGUF was created; otherwise pick up any .gguf output
        if not os.path.exists(gguf_output_path):
            candidates = [f for f in os.listdir(work_dir) if f.endswith(".gguf")]
            if not candidates:
                raise FileNotFoundError("No GGUF file generated by ggc t2")
            gguf_output_path = os.path.join(work_dir, candidates[0])

        # Move to final output
        final_gguf_path = os.path.join(output_dir, os.path.basename(gguf_output_path))
        shutil.move(gguf_output_path, final_gguf_path)

        # Save minimal config
        config_path = os.path.join(output_dir, "config.json")
        with open(config_path, "w") as f:
            json.dump({
                "model_type": "unet",
                "format": "gguf",
                "source_file": input_name
            }, f)

        progress(1.0, desc="Conversion to GGUF complete!")
        return True, "UNet converted to GGUF successfully."

    except Exception as e:
        return False, str(e)
    finally:
        if 'work_dir' in locals():
            shutil.rmtree(work_dir, ignore_errors=True)
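
# Optional sketch (not part of the original script): a small pre-flight check that
# the `ggc` CLI installed by gguf-connector is actually on PATH, so a conversion
# can fail fast with a clear message instead of a raw subprocess error. Uses only
# the standard library; the helper name is illustrative.
def ggc_cli_available():
    """Return True if the `ggc` executable can be located on PATH."""
    return shutil.which("ggc") is not None
# convert_unet_to_gguf() could call this first and surface a friendlier error.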

# --- Main Processing Function ---
def process_and_upload_unet_to_gguf(repo_url, safetensors_filename, hf_token,
                                    new_repo_id, private_repo, progress=gr.Progress()):
    if not all([repo_url, safetensors_filename, hf_token, new_repo_id]):
        return None, "❌ Error: Please fill in all fields."
    if not re.match(r"^[a-zA-Z0-9._-]+/[a-zA-Z0-9._-]+$", new_repo_id):
        return None, "❌ Error: Invalid repository ID format. Use 'username/model-name'."

    temp_dir = tempfile.mkdtemp()
    output_dir = tempfile.mkdtemp()
    try:
        # Authenticate
        progress(0.05, desc="Logging into Hugging Face...")
        api = HfApi(token=hf_token)
        user_info = api.whoami()
        user_name = user_info['name']
        progress(0.1, desc=f"Logged in as {user_name}.")

        # Parse source repo
        clean_url = repo_url.strip().rstrip("/")
        if "huggingface.co" not in clean_url:
            return None, "❌ Source must be a Hugging Face model repo."
        src_repo_id = clean_url.split("huggingface.co/")[-1]

        # Download the specified safetensors file
        progress(0.15, desc=f"Downloading {safetensors_filename}...")
        safetensors_path = hf_hub_download(
            repo_id=src_repo_id,
            filename=safetensors_filename,
            cache_dir=temp_dir,
            token=hf_token
        )
        progress(0.3, desc="Download complete.")

        # Convert
        success, msg = convert_unet_to_gguf(safetensors_path, output_dir, progress)
        if not success:
            return None, f"❌ Conversion failed: {msg}"

        # Create new repo
        progress(0.8, desc="Creating new repository...")
        api.create_repo(
            repo_id=new_repo_id,
            private=private_repo,
            repo_type="model",
            exist_ok=True
        )

        # Generate README
        readme = f"""---
library_name: diffusers
tags:
- gguf
- unet
- diffusion
- converted-by-gradio
---

# GGUF Model

Converted from: [`{src_repo_id}`](https://huggingface.co/{src_repo_id})

File: `{safetensors_filename}` → `{os.path.splitext(safetensors_filename)[0]}.gguf`

Converted by: {user_name}

Date: {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}
"""
        with open(os.path.join(output_dir, "README.md"), "w") as f:
            f.write(readme)

        # Upload
        progress(0.9, desc="Uploading to Hugging Face Hub...")
        api.upload_folder(
            repo_id=new_repo_id,
            folder_path=output_dir,
            repo_type="model",
            token=hf_token,
            commit_message="Upload converted GGUF model"
        )

        progress(1.0, desc="✅ Done!")
        result_html = f"""
        <p>✅ Success! Your GGUF model is uploaded to:
        <a href="https://huggingface.co/{new_repo_id}" target="_blank">{new_repo_id}</a></p>
        <p>Visibility: {'Private' if private_repo else 'Public'}</p>
        """
        return result_html, "✅ Conversion and upload successful!"

    except Exception as e:
        return None, f"❌ Error: {str(e)}"
    finally:
        shutil.rmtree(temp_dir, ignore_errors=True)
        shutil.rmtree(output_dir, ignore_errors=True)
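
# Optional sketch (assumption: the `gguf` pip package from the llama.cpp project is
# installed; the app above does not require it). A quick local sanity check that a
# produced .gguf file parses and contains tensors before it is uploaded anywhere.
def inspect_gguf(gguf_path):
    """Print a tensor count for a GGUF file; return True if it parses."""
    try:
        from gguf import GGUFReader  # provided by the `gguf` package
        reader = GGUFReader(gguf_path)
        print(f"{os.path.basename(gguf_path)}: {len(reader.tensors)} tensors")
        return True
    except Exception as exc:
        print(f"GGUF inspection failed: {exc}")
        return False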

# --- Gradio UI ---
with gr.Blocks(title="Safetensors → GGUF Converter") as demo:
    gr.Markdown("# 🔄 Safetensors to GGUF Converter")
    gr.Markdown("Converts any `.safetensors` file from a Hugging Face model repo to GGUF format.")

    with gr.Row():
        with gr.Column():
            repo_url = gr.Textbox(
                label="Source Model Repository URL",
                placeholder="https://huggingface.co/Yabo/FramePainter",
                info="Hugging Face model repo containing your safetensors file"
            )
            safetensors_filename = gr.Textbox(
                label="Safetensors Filename",
                placeholder="unet.safetensors",
                info="Name of the .safetensors file in the repo (e.g., unet.safetensors)"
            )
            hf_token = gr.Textbox(
                label="Hugging Face Token",
                type="password",
                info="Write-access token from https://huggingface.co/settings/tokens"
            )
        with gr.Column():
            new_repo_id = gr.Textbox(
                label="New Repository ID",
                placeholder="your-username/my-model-gguf",
                info="Format: username/model-name"
            )
            private_repo = gr.Checkbox(label="Make Private", value=False)
            convert_btn = gr.Button("🚀 Convert & Upload", variant="primary")

    with gr.Row():
        status_output = gr.Markdown()
        repo_link_output = gr.HTML()

    convert_btn.click(
        fn=process_and_upload_unet_to_gguf,
        inputs=[repo_url, safetensors_filename, hf_token, new_repo_id, private_repo],
        outputs=[repo_link_output, status_output],
        show_progress=True
    )

    gr.Examples(
        examples=[
            ["https://huggingface.co/Yabo/FramePainter", "unet_diffusion_pytorch_model.safetensors"]
        ],
        inputs=[repo_url, safetensors_filename]
    )

demo.launch()
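
# Optional: converting a large UNet checkpoint can take several minutes. If requests
# time out when the app is hosted, one way to handle that is to enable Gradio's
# request queue by chaining it before launch, i.e. `demo.queue().launch()` above.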