update
app.py CHANGED
@@ -34,7 +34,7 @@ magma_qa_prompt = "<image>\n{} Answer the question briefly."
 magma_model_id = "microsoft/Magma-8B"
 magam_model = AutoModelForCausalLM.from_pretrained(magma_model_id, trust_remote_code=True, torch_dtype=dtype)
 magma_processor = AutoProcessor.from_pretrained(magma_model_id, trust_remote_code=True)
-
+magam_model.to("cuda")
 
 # Download the entire repository
 # snapshot_download(repo_id=repo_id, local_dir=local_dir)
@@ -67,7 +67,7 @@ MARKDOWN = """
 
 \[[arXiv Paper](https://www.arxiv.org/pdf/2502.13130)\] \[[Project Page](https://microsoft.github.io/Magma/)\] \[[Github Repo](https://github.com/microsoft/Magma)\] \[[Hugging Face Model](https://huggingface.co/microsoft/Magma-8B)\]
 
-This demo is powered by [Gradio](https://gradio.app/) and uses OmniParserv2 to generate Set-of-Mark prompts.
+This demo is powered by [Gradio](https://gradio.app/) and uses [OmniParserv2](https://github.com/microsoft/OmniParser) to generate [Set-of-Mark prompts](https://github.com/microsoft/SoM).
 </div>
 """
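For context, below is a minimal, self-contained sketch of the loading step this commit touches, with the imports and a `dtype` value filled in for illustration (app.py defines `dtype` earlier in the file; `torch.bfloat16` is assumed here). The only functional change in the commit is the `magam_model.to("cuda")` call, which places the model on the GPU before inference.

```python
# Minimal sketch of the model-loading step touched by this commit.
# Assumptions: torch and transformers are installed, a CUDA device is
# available, and dtype is torch.bfloat16 (app.py defines dtype earlier).
import torch
from transformers import AutoModelForCausalLM, AutoProcessor

dtype = torch.bfloat16  # assumed value for illustration

magma_model_id = "microsoft/Magma-8B"
magam_model = AutoModelForCausalLM.from_pretrained(
    magma_model_id, trust_remote_code=True, torch_dtype=dtype
)
magma_processor = AutoProcessor.from_pretrained(magma_model_id, trust_remote_code=True)

# The change introduced by this commit: move the model weights onto the GPU
# so generation does not run on the CPU inside the Space.
magam_model.to("cuda")
```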