videopix committed on
Commit
07d8ca5
·
verified ·
1 Parent(s): 32ca7ba

Update app_old.py

Browse files
Files changed (1) hide show
  1. app_old.py +134 -0
app_old.py CHANGED
@@ -0,0 +1,134 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os, io, base64, asyncio, torch, spaces
2
+ from fastapi import FastAPI, Request
3
+ from fastapi.middleware.cors import CORSMiddleware
4
+ from fastapi.responses import HTMLResponse, JSONResponse
5
+ from diffusers import FluxPipeline
6
+ from PIL import Image
7
+ from concurrent.futures import ThreadPoolExecutor
8
+
9
# Hugging Face access token for gated/private model downloads (may be None).
HF_TOKEN = os.getenv("HF_TOKEN")
# Hub identifier of the base text-to-image model.
BASE_MODEL = "black-forest-labs/FLUX.1-schnell"

# Process-wide cache so the pipeline is loaded at most once.
_cached = {}
# moderate concurrency so CPU doesn't choke
executor = ThreadPoolExecutor(max_workers=3)
# Caps in-flight generations at the async layer to match the pool size.
semaphore = asyncio.Semaphore(3)
16
+
17
def load_pipeline():
    """Load and cache the FLUX.1-schnell text-to-image pipeline.

    Returns:
        The process-wide cached ``FluxPipeline`` instance; the model is
        downloaded and initialized only on the first call.
    """
    if "flux" in _cached:
        return _cached["flux"]
    print("🔹 Loading FLUX.1-schnell (fast mode)")
    pipe = FluxPipeline.from_pretrained(
        BASE_MODEL,
        torch_dtype=torch.float16,
        # `use_auth_token` is deprecated in recent huggingface_hub/diffusers;
        # `token` is the supported keyword for authenticated downloads.
        token=HF_TOKEN,
    ).to("cpu")  # dtype already fixed by torch_dtype above
    # Trade speed for lower peak memory during attention and VAE decode.
    pipe.enable_attention_slicing()
    pipe.enable_vae_tiling()
    # NOTE(review): float16 on CPU is poorly supported by many torch ops and
    # is often slower than float32 — confirm this actually runs; consider
    # bfloat16/float32 for CPU inference.
    _cached["flux"] = pipe
    return pipe
30
+
31
def generate_image_sync(prompt: str, seed: int = 42):
    """Render one image for *prompt*, deterministic for a given *seed*.

    Generates at a reduced 768x432 resolution with few steps for speed,
    then upscales the result to 960x540 before returning it.
    """
    pipeline = load_pipeline()
    rng = torch.Generator(device="cpu").manual_seed(int(seed))
    # Render small and fast; clarity is recovered by the upscale below.
    render_width, render_height = 768, 432
    result = pipeline(
        prompt=prompt,
        width=render_width,
        height=render_height,
        num_inference_steps=4,
        guidance_scale=3,
        generator=rng,
    )
    image = result.images[0]
    # Slight upscale back to 960x540 to keep the output clear.
    return image.resize((960, 540), Image.BICUBIC)
46
+
47
async def generate_image_async(prompt, seed):
    """Run the blocking generator on the worker pool, bounded by the semaphore."""
    async with semaphore:
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(
            executor, generate_image_sync, prompt, seed
        )
51
+
52
# FastAPI application plus permissive CORS so browser clients can call the API.
app = FastAPI(title="FLUX Fast API", version="3.1")
app.add_middleware(
    CORSMiddleware,
    # NOTE(review): wildcard origins combined with allow_credentials=True is
    # rejected by browsers under the CORS spec (Starlette echoes the request
    # origin instead) — confirm this exposure is intended.
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
60
+
61
@app.get("/", response_class=HTMLResponse)
def home():
    """Serve the single-page demo UI that POSTs prompts to /api/generate.

    Returns:
        A self-contained HTML page (inline CSS + JS) as a string.
    """
    return """
    <html><head><title>FLUX Fast</title>
    <style>body{font-family:Arial;text-align:center;padding:2rem}
    input,button{margin:.5rem;padding:.6rem;width:300px;border-radius:6px;border:1px solid #ccc}
    button{background:#444;color:#fff}button:hover{background:#333}
    img{margin-top:1rem;max-width:90%;border-radius:12px}</style></head>
    <body><h2>🎨 FLUX Fast Generator</h2>
    <form id='f'><input id='prompt' placeholder='Describe image...' required><br>
    <input id='seed' type='number' value='42'><br>
    <button>Generate</button></form><div id='out'></div>
    <script>
    const form = document.getElementById("f");
    const promptInput = document.getElementById("prompt");
    const seedInput = document.getElementById("seed");
    const resultDiv = document.getElementById("out");
    form.addEventListener("submit", async (e) => {
      e.preventDefault();
      const prompt = promptInput.value.trim();
      if (!prompt) {
        resultDiv.innerHTML = "<p style='color:red'>❌ Please enter a prompt</p>";
        return;
      }
      resultDiv.innerHTML = "<p>⏳ Generating...</p>";
      const payload = {
        prompt: prompt,
        // Explicit radix, and fall back to 42 when the field is empty or
        // non-numeric (parseInt would otherwise yield NaN -> JSON null,
        // which the server rejects as invalid).
        seed: Number.parseInt(seedInput.value, 10) || 42
      };
      // Wrap the round trip so a network failure doesn't leave the UI
      // stuck on "Generating..." with an unhandled rejection.
      try {
        const res = await fetch("/api/generate", {
          method: "POST",
          headers: { "Content-Type": "application/json" },
          body: JSON.stringify(payload)
        });
        const json = await res.json();
        if (json.status === "success") {
          resultDiv.innerHTML = `<img src="data:image/png;base64,${json.image_base64}"/><p>✅ Done!</p>`;
        } else {
          resultDiv.innerHTML = `<p style='color:red'>❌ ${json.message}</p>`;
        }
      } catch (err) {
        resultDiv.innerHTML = `<p style='color:red'>❌ ${err}</p>`;
      }
    });
    </script>
    </body></html>
    """
105
+
106
@app.post("/api/generate")
async def api_generate(request: Request):
    """Generate an image from a JSON body and return it base64-encoded.

    Expects ``{"prompt": str, "seed": int}``. Responds with
    ``{"status": "success", ...}`` on success, or an error payload with
    HTTP 400 (bad input) / 500 (generation failure).
    """
    try:
        payload = await request.json()
        prompt = str(payload.get("prompt", "")).strip()
        seed = int(payload.get("seed", 42))
        if not prompt:
            return JSONResponse({"status": "error", "message": "Prompt required"}, 400)
    except Exception:
        return JSONResponse({"status": "error", "message": "Invalid JSON"}, 400)

    try:
        image = await generate_image_async(prompt, seed)
        buffer = io.BytesIO()
        image.save(buffer, format="PNG")
        encoded = base64.b64encode(buffer.getvalue()).decode("utf-8")
        return JSONResponse(
            {"status": "success", "prompt": prompt, "image_base64": encoded}
        )
    except Exception as exc:
        print(f"❌ Error: {exc}")
        return JSONResponse({"status": "error", "message": str(exc)}, 500)
126
+
127
@spaces.GPU
def keep_alive():
    """Return a readiness string; presumably decorated so Hugging Face
    Spaces detects a GPU entry point — confirm against Spaces ZeroGPU docs."""
    return "ZeroGPU Ready"
129
+
130
# Script entry point: ping the ZeroGPU hook, then serve the FastAPI app.
if __name__ == "__main__":
    import uvicorn
    print("🚀 Launching Fast FLUX API")
    keep_alive()
    # Bind on all interfaces; 7860 is the conventional HF Spaces port.
    uvicorn.run(app, host="0.0.0.0", port=7860)