# LLaMA-Mesh Hugging Face Space: chat-driven 3D mesh generation with a
# local GGUF model (llama-cpp-python) and a Gradio UI.
| import os | |
| import gradio as gr | |
| import copy | |
| from llama_cpp import Llama | |
| from huggingface_hub import hf_hub_download | |
| from trimesh.exchange.gltf import export_glb | |
| import trimesh | |
| import numpy as np | |
| import tempfile | |
# Initialize the llama.cpp model, downloading the GGUF weights from the
# Hugging Face Hub on first run (cached locally afterwards). Repo and file
# are overridable via the REPO_ID / MODEL_FILE environment variables.
model = Llama(
    model_path=hf_hub_download(
        repo_id=os.environ.get("REPO_ID", "bartowski/LLaMA-Mesh-GGUF"),
        filename=os.environ.get("MODEL_FILE", "LLaMA-Mesh-Q4_K_L.gguf"),
    ),
    # Fix: llama_cpp defaults to a 512-token context window, but the UI
    # (DESCRIPTION) advertises 4096-token support — request it explicitly.
    n_ctx=4096,
)
# HTML blurb rendered at the top of the page.
DESCRIPTION = '''
<div>
<h1 style="text-align: center;">LLaMA-Mesh</h1>
<p>LLaMA-Mesh: Unifying 3D Mesh Generation with Language Models.</p>
<p>Supports up to 4096 tokens. Run locally for 8k token context.</p>
<p>To generate another mesh, click "clear" and start a new dialog.</p>
</div>
'''
# Footer attribution shown below the chat UI.
LICENSE = """
<p/>--- Built with Meta Llama 3.1 8B ---
"""
# Placeholder panel intended for an empty chat window.
# NOTE(review): defined but never passed to the ChatInterface below —
# confirm whether it should be wired up (e.g. via a chatbot= argument).
PLACEHOLDER = """
<div style="padding: 30px; text-align: center;">
<h1 style="font-size: 28px; opacity: 0.55;">LLaMA-Mesh</h1>
<p style="font-size: 18px; opacity: 0.65;">Create 3D meshes by chatting.</p>
</div>
"""
# Page-level CSS: center headings and style the (optional) duplicate button.
css = """
h1 {
text-align: center;
}
#duplicate-button {
margin: auto;
color: white;
background: #1565c0;
border-radius: 100vh;
}
"""
def generate_text(message, history, max_tokens=2048, temperature=0.9, top_p=0.95):
    """Stream a chat completion from the local GGUF model.

    Parameters
    ----------
    message : str
        The latest user message.
    history : list
        Prior turns supplied by gr.ChatInterface — either ``[user,
        assistant]`` pairs or OpenAI-style ``{"role", "content"}`` dicts
        depending on the Gradio version (both are handled).
    max_tokens : int
        Maximum number of tokens to generate.
    temperature : float
        Sampling temperature.
    top_p : float
        Nucleus-sampling threshold.

    Yields
    ------
    str
        The accumulated response so far, so Gradio can live-update the chat.
    """
    # Bug fix: the original ignored `history`, so every turn lost all
    # conversational context. Rebuild the full message list on each call.
    messages = []
    for turn in history or []:
        if isinstance(turn, dict):
            # Messages-style history (newer Gradio).
            messages.append({"role": turn["role"], "content": turn["content"]})
        else:
            # Pair-style history: (user_message, assistant_message).
            user_msg, assistant_msg = turn[0], turn[1]
            if user_msg:
                messages.append({"role": "user", "content": user_msg})
            if assistant_msg:
                messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    response = model.create_chat_completion(
        messages=messages,
        temperature=temperature,
        max_tokens=max_tokens,
        top_p=top_p,
        stream=True,
    )
    accumulated = ""
    for chunk in response:
        delta = chunk["choices"][0].get("delta", {})
        # The first streamed chunk carries only the role; later chunks carry
        # text. `or ""` also guards against an explicit None content.
        accumulated += delta.get("content", "") or ""
        yield accumulated
def apply_gradient_color(mesh_text):
    """Color an OBJ mesh with a height-based gradient and export it as GLB.

    Vertices are colored red→blue along the Y axis (red at the top, blue at
    the bottom) so gr.Model3D renders a visually distinct mesh.

    Parameters
    ----------
    mesh_text : str
        Mesh in Wavefront OBJ format, as produced by the LLM.

    Returns
    -------
    str
        Filesystem path to the exported ``.glb`` file.
    """
    # Round-trip through a temp .obj file because trimesh loads from a path.
    # Fix: close the handle before writing via open() — the original left the
    # NamedTemporaryFile handle open (problematic on Windows, leaks an fd).
    tmp = tempfile.NamedTemporaryFile(suffix=".obj", delete=False)
    tmp.close()
    obj_path = tmp.name
    with open(obj_path, "w") as f:
        f.write(mesh_text)
    mesh = trimesh.load_mesh(obj_path, file_type='obj')

    vertices = mesh.vertices
    y_values = vertices[:, 1]
    # Fix: guard against a zero Y extent (flat mesh) — the original divided
    # by zero here, producing NaN colors.
    y_min = y_values.min()
    y_range = y_values.max() - y_min
    if y_range > 0:
        y_normalized = (y_values - y_min) / y_range
    else:
        y_normalized = np.zeros(len(vertices))

    # RGBA floats in [0, 1]: red grows with height, blue shrinks, full alpha.
    colors = np.zeros((len(vertices), 4))
    colors[:, 0] = y_normalized
    colors[:, 2] = 1 - y_normalized
    colors[:, 3] = 1.0
    mesh.visual.vertex_colors = colors

    glb_path = obj_path.replace(".obj", ".glb")
    with open(glb_path, "wb") as f:
        f.write(export_glb(mesh))
    # Fix: remove the intermediate .obj so repeated clicks don't accumulate
    # temp files (the .glb must survive — Gradio serves it to the browser).
    os.remove(obj_path)
    return glb_path
# Assemble the Gradio UI: a chat panel driven by generate_text plus a
# standalone OBJ → gradient-colored GLB visualizer.
with gr.Blocks(css=css) as demo:
    gr.Markdown(DESCRIPTION)
    chatbot = gr.ChatInterface(
        generate_text,
        title="LLaMA-Mesh | GGUF Integration",
        description="Supports generating 3D meshes with LLaMA-GGUF.",
        examples=[
            ['Create a 3D model of a wooden hammer'],
            ['Create a 3D model of a pyramid in OBJ format'],
            ['Create a 3D model of a table.'],
        ],
        cache_examples=False,
        # These sliders map positionally onto generate_text's
        # (max_tokens, temperature, top_p) parameters.
        # NOTE(review): the 4096–16384 range exceeds the 4096-token context
        # advertised in DESCRIPTION — confirm the intended limits.
        additional_inputs=[
            gr.Slider(minimum=4096, maximum=16384, value=8192, step=1, label="Max new tokens"),
            gr.Slider(minimum=0.1, maximum=1.5, value=0.9, step=0.1, label="Temperature"),
            gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
        ],
    )
    gr.Markdown("### 3D Mesh Visualization")
    mesh_input = gr.Textbox(
        label="3D Mesh Input",
        placeholder="Paste your 3D mesh in OBJ format here...",
        lines=5,
    )
    visualize_button = gr.Button("Visualize 3D Mesh")
    output_model = gr.Model3D(label="3D Mesh Visualization")
    # On click: convert the pasted OBJ text into a gradient-colored GLB
    # and display it in the Model3D viewer.
    visualize_button.click(
        fn=apply_gradient_color,
        inputs=[mesh_input],
        outputs=[output_model]
    )
    gr.Markdown(LICENSE)
# Launch the demo only when run as a script (not when imported).
if __name__ == "__main__":
    demo.launch()