import os
import json
import uuid
import logging
from datetime import datetime
from pathlib import Path
from huggingface_hub import CommitScheduler, login
# --- Setup Standard Logger ---
logger = logging.getLogger(__name__)
# --- Configuration ---
LOG_ID = os.environ.get("LOG_ID")
HF_TOKEN = os.environ.get("HF_TOKEN")
LOCAL_LOG_DIR = Path("debug_logs")
LOCAL_LOG_DIR.mkdir(parents=True, exist_ok=True)
# --- Initialize Scheduler ---
scheduler = None
if LOG_ID:
    if HF_TOKEN:
        login(token=HF_TOKEN)
    try:
        scheduler = CommitScheduler(
            repo_id=LOG_ID,
            repo_type="dataset",
            folder_path=LOCAL_LOG_DIR,
            path_in_repo="data",
            every=15,
            token=HF_TOKEN,
        )
        logger.info(f"CommitScheduler initialized for {LOG_ID}")
    except Exception as e:
        logger.error(f"Failed to initialize scheduler: {e}")
def debug_ref(prompt, output_image, input_images=None, extra_params=None):
"""
Logs generation with multiple input images and roles.
Args:
prompt (str): The text prompt.
output_image (PIL.Image): The generated result.
input_images (list of dict): List of inputs.
Example: [{'image': PIL.Image, 'role': 'style'}, {'image': PIL.Image, 'role': 'controlnet'}]
extra_params (dict): Generation parameters (seed, steps, etc.).
"""
if scheduler is None:
return
try:
unique_id = str(uuid.uuid4())
output_filename = f"{unique_id}_output.png"
output_path = LOCAL_LOG_DIR / output_filename
output_image.save(output_path)
input_refs_data = []
if input_images:
for idx, item in enumerate(input_images):
img = item.get('image')
role = item.get('role', f'ref_{idx}') # Default role if not provided
if img:
safe_role = role.replace(" ", "_").replace("/", "-")
input_filename = f"{unique_id}_{safe_role}.png"
input_path = LOCAL_LOG_DIR / input_filename
img.save(input_path)
input_refs_data.append({
"role": role,
"filename": input_filename
})
log_entry = {
"prompt": prompt,
"timestamp": datetime.now().isoformat(),
"inference_id": unique_id,
"output_filename": output_filename,
"inputs": input_refs_data # List of dicts with role/filename
}
if extra_params:
log_entry.update(extra_params)
metadata_filename = LOCAL_LOG_DIR / "metadata.jsonl"
with scheduler.lock:
with open(metadata_filename, "a") as f:
f.write(json.dumps(log_entry) + "\n")
logger.debug(f"Debug log scheduled: {unique_id}.")
except Exception as e:
logger.error(f"Debug Log Error: {e}")