# NOTE(review): the three lines below are non-Python paste residue from a
# Hugging Face Spaces build-log page; commented out so the file parses.
# Spaces:
# Build error
# Build error
| #!/usr/bin/env python | |
| import os | |
| import shutil | |
| import tempfile | |
| import gradio as gr | |
| from PIL import Image | |
| import numpy as np | |
| from settings import ( | |
| DEFAULT_IMAGE_RESOLUTION, | |
| DEFAULT_NUM_IMAGES, | |
| MAX_IMAGE_RESOLUTION, | |
| MAX_NUM_IMAGES, | |
| MAX_SEED, | |
| ) | |
| from utils import randomize_seed_fn | |
# ---- helper to build a quick textured copy of the mesh ---------------
def apply_texture(src_mesh: str, texture: str, tag: str) -> str:
    """
    Write a copy of `src_mesh` and a tiny .mtl that points to `texture`.

    The geometry copy, the generated MTL, and a copy of the texture are
    placed together in a fresh temporary directory so the OBJ is
    self-contained for the viewer.

    Args:
        src_mesh: Path to the source OBJ geometry.
        texture: Path to the image to use as the diffuse map (map_Kd).
        tag: Basename used for the new OBJ/MTL pair.

    Returns:
        Path to the new OBJ file for viewing.
    """
    tmp_dir = tempfile.mkdtemp()
    mesh_copy = os.path.join(tmp_dir, f"{tag}.obj")
    mtl_name = f"{tag}.mtl"
    # copy geometry
    shutil.copy(src_mesh, mesh_copy)
    # write minimal MTL: one material whose diffuse map is our texture
    with open(os.path.join(tmp_dir, mtl_name), "w") as f:
        f.write(f"newmtl material_0\nmap_Kd {os.path.basename(texture)}\n")
    # the MTL references the texture by basename, so it must live next to the OBJ
    shutil.copy(texture, os.path.join(tmp_dir, os.path.basename(texture)))
    # patch OBJ to reference our new MTL: an existing mtllib line is
    # *replaced* (the old code left it untouched, so the viewer kept
    # loading the old material and the new texture never showed up)
    with open(mesh_copy, "r+") as f:
        lines = f.readlines()
        if lines and lines[0].startswith("mtllib"):
            lines[0] = f"mtllib {mtl_name}\n"
        else:
            # also safe for an empty OBJ (old code raised IndexError)
            lines.insert(0, f"mtllib {mtl_name}\n")
        f.seek(0)
        f.writelines(lines)
        # drop any stale tail bytes if the rewrite came out shorter
        f.truncate()
    return mesh_copy
def image_to_temp_path(img_like, tag, out_dir=None):
    """
    Convert an image-like object to a PNG path on disk.

    Accepts a file path (returned as-is), a PIL.Image, a numpy array, or a
    list/tuple whose first element is one of those (gallery-style output).

    Args:
        img_like: The image-like object to persist.
        tag: Basename (without extension) for the saved PNG.
        out_dir: Directory to save into; a fresh temp dir when None.

    Returns:
        Path to the saved (or pre-existing) image file.

    Raises:
        ValueError: On an empty list/tuple or an unsupported type.
    """
    # Handle tuple or list input (e.g. a gallery item): unwrap first entry.
    if isinstance(img_like, (list, tuple)):
        if not img_like:
            raise ValueError("Empty image list/tuple.")
        img_like = img_like[0]
    # If it's already a file path, nothing to save.
    if isinstance(img_like, str):
        return img_like
    # Normalize numpy arrays to PIL so both cases share one save path
    # (the original duplicated the save logic verbatim for ndarray).
    if isinstance(img_like, np.ndarray):
        img_like = Image.fromarray(img_like)
    if isinstance(img_like, Image.Image):
        target_dir = tempfile.mkdtemp() if out_dir is None else out_dir
        # a caller-supplied out_dir may not exist yet
        os.makedirs(target_dir, exist_ok=True)
        temp_path = os.path.join(target_dir, f"{tag}.png")
        img_like.save(temp_path)
        return temp_path
    raise ValueError(f"Expected PIL.Image, str, list, or tuple — got {type(img_like)}")
def show_mesh(which, mesh, inp, coarse, fine):
    """
    Switch the displayed texture based on a dropdown change.

    Args:
        which: Selected texture name — "Input", "Coarse", or "Fine".
        mesh: Path to the base OBJ geometry.
        inp: Input image (path, PIL image, array, or gallery list/tuple).
        coarse: Coarse texture; a (path, caption) gallery tuple or a path.
        fine: Fine texture; same shapes as `coarse`.

    Returns:
        Path to a textured OBJ copy produced by `apply_texture`.
    """
    # removed a leftover debug print() from the original
    tex_map = {
        "Input": image_to_temp_path(inp, "input"),
        "Coarse": coarse[0] if isinstance(coarse, tuple) else coarse,
        "Fine": fine[0] if isinstance(fine, tuple) else fine,
    }
    texture_path = tex_map[which]
    return apply_texture(mesh, texture_path, which.lower())
| # ---------------------------------------------------------------------- | |
# ----------------------------------------------------------------------
def create_demo(process):
    """
    Build the Gradio Blocks UI for mesh texture generation.

    Args:
        process: Callback run on prompt submit. Receives (name,
            representative, image, prompt, a_prompt, n_prompt, num_samples,
            image_resolution, num_steps, guidance_scale, seed) and must
            return values for the four BRDF galleries plus the packed
            Blender file — see the `.then(...)` outputs below.

    Returns:
        The assembled (not yet launched) gr.Blocks demo.
    """
    with gr.Blocks() as demo:
        with gr.Row():
            with gr.Column():
                gr.Markdown("## Select preset from the example list, and modify the prompt accordingly")
                with gr.Row():
                    # hidden preset identifier; filled in by gr.Examples rows
                    name = gr.Textbox(label="Name", interactive=False, visible=False)
                    representative = gr.Image(label="Geometry", interactive=False)
                    image = gr.Image(label="UV Normal", interactive=False)
                prompt = gr.Textbox(label="Prompt", submit_btn=True)
                with gr.Accordion("Advanced options", open=False):
                    num_samples = gr.Slider(
                        label="Number of images", minimum=1, maximum=MAX_NUM_IMAGES, value=DEFAULT_NUM_IMAGES, step=1
                    )
                    image_resolution = gr.Slider(
                        label="Image resolution",
                        minimum=256,
                        maximum=MAX_IMAGE_RESOLUTION,
                        value=DEFAULT_IMAGE_RESOLUTION,
                        step=256,
                    )
                    num_steps = gr.Slider(label="Number of steps", minimum=1, maximum=100, value=10, step=1)
                    guidance_scale = gr.Slider(label="Guidance scale", minimum=0.1, maximum=30.0, value=9.0, step=0.1)
                    seed = gr.Slider(label="Seed", minimum=0, maximum=MAX_SEED, step=1, value=0)
                    randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
                    a_prompt = gr.Textbox(label="Additional prompt", value="best quality, extremely detailed")
                    n_prompt = gr.Textbox(
                        label="Negative prompt",
                        value="longbody, lowres, bad anatomy, bad hands, missing fingers, extra digit, fewer digits, cropped, worst quality, low quality",
                    )
            with gr.Column():
                # 2x2 grid of images for the output textures
                gr.Markdown("### Output BRDF")
                with gr.Row():
                    base_color = gr.Gallery(label="Base Color", show_label=True, columns=1, object_fit="scale-down")
                    normal = gr.Gallery(label="Displacement Map", show_label=True, columns=1, object_fit="scale-down")
                with gr.Row():
                    roughness = gr.Gallery(label="Roughness Map", show_label=True, columns=1, object_fit="scale-down")
                    metallic = gr.Gallery(label="Metallic Map", show_label=True, columns=1, object_fit="scale-down")
                gr.Markdown("### Download Packed Blender Files for 3D Visualization")
                out_blender_path = gr.File(label="Generated Blender File", file_types=[".blend"])
        # argument order here must match the signature of `process`
        inputs = [
            name,  # Name of the object
            representative,  # Geometry mesh
            image,
            prompt,
            a_prompt,
            n_prompt,
            num_samples,
            image_resolution,
            num_steps,
            guidance_scale,
            seed,
        ]
        # first call → run diffusion / texture network
        # (seed is optionally re-randomized first, then `process` fills the
        # four galleries and the downloadable Blender file)
        prompt.submit(
            fn=randomize_seed_fn,
            inputs=[seed, randomize_seed],
            outputs=seed,
            queue=False,
            api_name=False,
        ).then(
            fn=process,
            inputs=inputs,
            outputs=[base_color, normal, roughness, metallic, out_blender_path],
            api_name="canny",
            concurrency_id="main",
        )
        # preset rows: each supplies (name, geometry frame, UV-normal map,
        # prompt) plus the current defaults of the advanced sliders
        gr.Examples(
            fn=process,
            inputs=inputs,
            outputs=[base_color, normal, roughness, metallic],
            examples=[
                [
                    "bunny",
                    "examples/bunny/frame_0001.png",  # /dgxusers/Users/jyang/project/ObjectReal/data/control/preprocess/bunny/uv_normal/fused.png
                    "examples/bunny/uv_normal.png",  # /dgxusers/Users/jyang/project/ObjectReal/data/control/preprocess/bunny/uv_normal/fused.png
                    "feather",
                    a_prompt.value,
                    n_prompt.value,
                    num_samples.value,
                    image_resolution.value,
                    num_steps.value,
                    guidance_scale.value,
                    seed.value,
                ],
                [
                    "monkey",
                    "examples/monkey/frame_0001.png",  # /dgxusers/Users/jyang/project/ObjectReal/data/control/preprocess/monkey/uv_normal/fused.png
                    "examples/monkey/uv_normal.png",  # /dgxusers/Users/jyang/project/ObjectReal/data/control/preprocess/monkey/uv_normal/fused.png
                    "wood",
                    a_prompt.value,
                    n_prompt.value,
                    num_samples.value,
                    image_resolution.value,
                    num_steps.value,
                    guidance_scale.value,
                    seed.value,
                ],
                [
                    "tshirt",
                    "examples/tshirt/frame_0001.png",  # /dgxusers/Users/jyang/project/ObjectReal/data/control/preprocess/monkey/uv_normal/fused.png
                    "examples/tshirt/uv_normal.png",  # /dgxusers/Users/jyang/project/ObjectReal/data/control/preprocess/monkey/uv_normal/fused.png
                    "wood",
                    a_prompt.value,
                    n_prompt.value,
                    num_samples.value,
                    image_resolution.value,
                    num_steps.value,
                    guidance_scale.value,
                    seed.value,
                ],
                # [
                #     "highheel",
                #     "examples/highheel/frame_0001.png",  # /dgxusers/Users/jyang/project/ObjectReal/data/control/preprocess/monkey/uv_normal/fused.png
                #     "examples/highheel/uv_normal.png",  # /dgxusers/Users/jyang/project/ObjectReal/data/control/preprocess/monkey/uv_normal/fused.png
                #     "wood",
                #     a_prompt.value,
                #     n_prompt.value,
                #     num_samples.value,
                #     image_resolution.value,
                #     num_steps.value,
                #     guidance_scale.value,
                #     seed.value,
                # ],
                [
                    "tank",
                    "examples/tank/frame_0001.png",  # /dgxusers/Users/jyang/project/ObjectReal/data/control/preprocess/monkey/uv_normal/fused.png
                    "examples/tank/uv_normal.png",  # /dgxusers/Users/jyang/project/ObjectReal/data/control/preprocess/monkey/uv_normal/fused.png
                    "wood",
                    a_prompt.value,
                    n_prompt.value,
                    num_samples.value,
                    image_resolution.value,
                    num_steps.value,
                    guidance_scale.value,
                    seed.value,
                ],
                [
                    "fighter",
                    "examples/fighter/frame_0001.png",  # /dgxusers/Users/jyang/project/ObjectReal/data/control/preprocess/monkey/uv_normal/fused.png
                    "examples/fighter/uv_normal.png",  # /dgxusers/Users/jyang/project/ObjectReal/data/control/preprocess/monkey/uv_normal/fused.png
                    "wood",
                    a_prompt.value,
                    n_prompt.value,
                    num_samples.value,
                    image_resolution.value,
                    num_steps.value,
                    guidance_scale.value,
                    seed.value,
                ],
            ],
        )
    return demo
| if __name__ == "__main__": | |
| from model import Model | |
| model = Model(task_name="Texnet") | |
| demo = create_demo(model.process_texnet) | |
| demo.queue().launch() |