import os
import json
import time
import random
import uuid
import requests
import gradio as gr
from io import BytesIO
from PIL import Image
from huggingface_hub import snapshot_download
import shutil
# === AUTO DOWNLOAD EVERYTHING FROM OFFICIAL REPO ===
REPO_ID = "Tongyi-MAI/Z-Image-Turbo"
LOCAL_DIR = "z-image-models"
print("Downloading ALL files from Tongyi-MAI/Z-Image-Turbo... (this takes ~3-5 mins first time)")
snapshot_download(
    repo_id=REPO_ID,
    local_dir=LOCAL_DIR,
    local_dir_use_symlinks=False,
    allow_patterns=["*.safetensors", "*.json", "*.yaml", "*.yml", "*.bin"],
    ignore_patterns=["*.git*", "*.md"]
)
# === Organize into ComfyUI folders ===
os.makedirs("models/checkpoints", exist_ok=True)
os.makedirs("models/vae", exist_ok=True)
os.makedirs("models/config", exist_ok=True)
for file in os.listdir(LOCAL_DIR):
    src = os.path.join(LOCAL_DIR, file)
    if file.endswith(".safetensors") or file.endswith(".ckpt"):
        if "vae" in file.lower():
            shutil.copy(src, f"models/vae/{file}")
        else:
            shutil.copy(src, f"models/checkpoints/{file}")
    elif file.endswith((".json", ".yaml", ".yml")):
        shutil.copy(src, f"models/config/{file}")
print("All Z-Image-Turbo models downloaded and placed correctly!")
# === Start ComfyUI ===
import subprocess
import threading
def start_comfyui():
    if not os.path.exists("ComfyUI"):
        print("Cloning ComfyUI...")
        subprocess.run(["git", "clone", "https://github.com/comfyanonymous/ComfyUI.git"])
    # ComfyUI resolves model paths relative to its own folder, so mirror the
    # downloaded checkpoints/VAE into ComfyUI/models before launching.
    for sub in ("checkpoints", "vae"):
        dst = os.path.join("ComfyUI", "models", sub)
        os.makedirs(dst, exist_ok=True)
        for f in os.listdir(os.path.join("models", sub)):
            shutil.copy(os.path.join("models", sub, f), dst)
    # Run main.py with cwd pointed at the ComfyUI folder instead of os.chdir(),
    # so this script's own relative paths (workflow.json, models/) stay valid.
    subprocess.Popen([
        "python", "main.py",
        "--listen", "0.0.0.0",
        "--port", "8188",
        "--force-fp16",
        "--disable-auto-launch"
    ], cwd="ComfyUI", stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    print("ComfyUI starting...")
    time.sleep(25)

threading.Thread(target=start_comfyui, daemon=True).start()
COMFYUI_URL = "http://127.0.0.1:8188"
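# Optional readiness helper: polls the ComfyUI HTTP server instead of trusting
# the fixed 25 s sleep above. Assumption: any response from the root URL means
# the server is up. Call it before the first queue_prompt() if startup is slow.
def wait_for_comfyui(timeout=120, interval=1.0):
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            requests.get(COMFYUI_URL, timeout=2)
            return True
        except requests.RequestException:
            time.sleep(interval)
    return False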
WORKFLOW_PATH = "workflow.json"
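# workflow.json is expected to be an exported ComfyUI workflow whose node IDs
# match the ones referenced below ("7", "14", "15" for image outputs). Warn
# early if it is missing so generation failures are easier to diagnose.
if not os.path.exists(WORKFLOW_PATH):
    print(f"WARNING: {WORKFLOW_PATH} not found - place your exported ComfyUI workflow next to this script.")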
# === API Helpers ===
def queue_prompt(workflow):
    client_id = str(uuid.uuid4())
    try:
        r = requests.post(f"{COMFYUI_URL}/prompt", json={"prompt": workflow, "client_id": client_id})
        return r.json().get("prompt_id")
    except requests.RequestException:
        return None  # ComfyUI not reachable yet; callers treat None as "not ready"
def get_image_from_history(prompt_id):
    while True:
        try:
            history = requests.get(f"{COMFYUI_URL}/history/{prompt_id}").json()
            if prompt_id in history:
                outputs = history[prompt_id]["outputs"]
                for node_id in ["7", "14", "15"]:  # SaveImage / PreviewImage nodes
                    if node_id in outputs and "images" in outputs[node_id]:
                        img_data = outputs[node_id]["images"][0]
                        url = (f"{COMFYUI_URL}/view?filename={img_data['filename']}"
                               f"&type={img_data['type']}&subfolder={img_data.get('subfolder','')}")
                        return Image.open(BytesIO(requests.get(url).content))
        except Exception:
            pass  # history not ready yet; keep polling
        time.sleep(0.6)
# === Generation (100% random every time) ===
def generate(
    prompt, negative_prompt="",
    seed=-1, steps=20, cfg=7.0,
    width=1024, height=1024,
    upscale=1.0, batch_size=1
):
    with open(WORKFLOW_PATH, "r", encoding="utf-8") as f:
        workflow = json.load(f)
    batch_size = int(batch_size)  # Gradio sliders may pass floats
    base_seed = random.randint(1, 999999999) if seed < 0 else int(seed)
    cache_buster = random.random()
    images = []
    status_msg = ""
    for i in range(batch_size):
        current_seed = base_seed + i * 1000000
        for node in workflow["nodes"]:
            # Update prompts
            if node["type"] == "CLIPTextEncode":
                if "beautiful landscape" in node["widgets_values"][0]:
                    node["widgets_values"][0] = prompt
                if node["widgets_values"][0] == "":
                    node["widgets_values"][0] = negative_prompt
            # Update sampler
            if node["type"] == "KSampler":
                node["widgets_values"][0] = current_seed
                node["widgets_values"][1] = steps
                node["widgets_values"][2] = cfg
            # Size
            if node["type"] == "EmptyLatentImage":
                node["widgets_values"][0] = width
                node["widgets_values"][1] = height
                node["widgets_values"][2] = 1
            # Upscale
            if node["type"] == "LatentUpscaleBy":
                node["widgets_values"][0] = upscale
            # Cache buster
            node["_b"] = cache_buster + i
        pid = queue_prompt(workflow)
        if not pid:
            yield "ComfyUI not ready yet...", images
            return
        img = get_image_from_history(pid)
        if img:
            images.append(img)
            status_msg = f"Generated {i+1}/{batch_size} • Seed: {current_seed}"
            yield status_msg, images
    yield f"Complete! • Seeds: {base_seed} → {current_seed}", images
# === Gradio UI ===
with gr.Blocks(title="Z-Image Turbo • Official Models • Always Random") as demo:
    gr.Markdown("# Z-Image Turbo — Official Full Download\n"
                "Every file from https://huggingface.co/Tongyi-MAI/Z-Image-Turbo\n"
                "100% random • Hi-Res • Batch • No duplicates ever")
    with gr.Row():
        with gr.Column(scale=2):
            prompt = gr.Textbox(
                label="Prompt", lines=4,
                value="masterpiece, best quality, ultra-detailed, beautiful mountain lake at golden hour, cinematic lighting"
            )
            negative = gr.Textbox(label="Negative Prompt", value="blurry, ugly, deformed, low quality")
        with gr.Column():
            seed = gr.Number(label="Seed (-1 = random)", value=-1)
            steps = gr.Slider(10, 40, value=20, step=1, label="Steps")
            cfg = gr.Slider(3, 15, value=7.0, step=0.1, label="CFG")
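            # Remaining controls and wiring: a sketch matched to the generate()
            # signature above; the ranges, defaults, and launch port below are
            # assumptions, not fixed values.
            width = gr.Slider(512, 2048, value=1024, step=64, label="Width")
            height = gr.Slider(512, 2048, value=1024, step=64, label="Height")
            upscale = gr.Slider(1.0, 2.0, value=1.0, step=0.25, label="Latent Upscale")
            batch = gr.Slider(1, 4, value=1, step=1, label="Batch Size")
            btn = gr.Button("Generate", variant="primary")
    status = gr.Textbox(label="Status", interactive=False)
    gallery = gr.Gallery(label="Results", columns=2)
    btn.click(
        fn=generate,
        inputs=[prompt, negative, seed, steps, cfg, width, height, upscale, batch],
        outputs=[status, gallery],
    )

demo.queue().launch(server_name="0.0.0.0", server_port=7860)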