Felix Marty committed · Commit 8513f15
1 Parent(s): 7567dc4
style
app.py CHANGED
@@ -51,17 +51,17 @@ def onnx_export(token: str, model_id: str, task: str) -> str:
         commit_url = repo.push_to_hub()
         print("[dataset]", commit_url)
 
-        return f"
+        return f"#### Success 🔥 Yay! This model was successfully converted and a PR was open using your token, here: [{commit_info.pr_url}]({commit_info.pr_url})"
     except Exception as e:
-        return f"
+        return f"#### Error: {e}"
 
 DESCRIPTION = """
 <p align="center">
     <img src="https://huggingface.co/spaces/optimum/exporters/resolve/main/clean_hf_onnx.png"/>
 </p>
 
-<p align="center">
-
+<p align="center"; style="font-weight: 900; margin-bottom: 10px; margin-top: 10px>
+    Convert any PyTorch model to ONNX with 🤗 Optimum exporters 🏎️
 </p>
 
 This Space allows to automatically convert to ONNX 🤗 transformers models hosted on the Hugging Face Hub. It opens a PR on the target model, and it is up to the owner of the original model
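For context on the success message in this hunk: the `commit_info.pr_url` it links to is the kind of value `huggingface_hub` returns when a commit is created with `create_pr=True`. A minimal sketch of opening such a PR directly with the commit API (the file path, token, and model id are illustrative placeholders, not the Space's actual code):

```python
from huggingface_hub import CommitOperationAdd, HfApi

api = HfApi(token="hf_xxx")  # per the description below, a read-access token is enough to open a PR

# Stage the exported ONNX file and open a pull request instead of pushing to main.
commit_info = api.create_commit(
    repo_id="textattack/distilbert-base-cased-CoLA",  # illustrative model id (the placeholder used in the UI below)
    operations=[CommitOperationAdd(path_in_repo="model.onnx", path_or_fileobj="onnx/model.onnx")],
    commit_message="Add ONNX weights",
    create_pr=True,
)
print(commit_info.pr_url)  # the URL surfaced in the success message above
```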
@@ -71,7 +71,7 @@ Once converted, the model can for example be used in the [🤗 Optimum](https://
 Check out [this guide](https://huggingface.co/docs/optimum/main/en/onnxruntime/usage_guides/models) to see how!
 
 The steps are the following:
-- Paste a read-access token from
+- Paste a read-access token from https://huggingface.co/settings/tokens . Read access is enough given that we will open a PR against the source repo.
 - Input a model id from the Hub (for example:)
 - If necessary, input the task for this model.
 - Click "Convert to ONNX"
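Once such a PR is merged, the guide linked above shows how the exported weights can be used through 🤗 Optimum's ONNX Runtime integration. A minimal sketch, assuming the repository now contains an ONNX export (the model id is the illustrative placeholder from the UI below):

```python
from transformers import AutoTokenizer
from optimum.onnxruntime import ORTModelForSequenceClassification

model_id = "textattack/distilbert-base-cased-CoLA"  # illustrative; assumes the PR adding the ONNX weights was merged
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = ORTModelForSequenceClassification.from_pretrained(model_id)  # runs on ONNX Runtime instead of PyTorch

inputs = tokenizer("This sentence is grammatical.", return_tensors="pt")
logits = model(**inputs).logits
```

The same export can also be reproduced locally with `optimum-cli export onnx --model <model_id> <output_dir>`; the `--task` option mirrors the task field below and is inferred automatically when omitted.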
@@ -83,7 +83,7 @@ Note: in case the model to convert is larger than 2 GB, it will be saved in a su
 with gr.Blocks() as demo:
     gr.Markdown(DESCRIPTION)
 
-    with gr.
+    with gr.Column():
         input_token = gr.Textbox(max_lines=1, label="Hugging Face token")
         input_model = gr.Textbox(max_lines=1, label="Model name", placeholder="textattack/distilbert-base-cased-CoLA")
         input_task = gr.Textbox(value="auto", max_lines=1, label="Task (can be left blank, will be automatically inferred)")
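For reference, a `gr.Column()` like the one introduced in this hunk typically also holds a button and an output component that wire these Textboxes to `onnx_export`. A self-contained sketch of that wiring (everything beyond the components shown in the diff is an assumption, not the Space's actual code):

```python
import gradio as gr

def onnx_export(token: str, model_id: str, task: str) -> str:
    # stand-in for the Space's conversion logic from the first hunk
    return f"Would convert {model_id} (task={task})"

with gr.Blocks() as demo:
    with gr.Column():
        input_token = gr.Textbox(max_lines=1, label="Hugging Face token")
        input_model = gr.Textbox(max_lines=1, label="Model name", placeholder="textattack/distilbert-base-cased-CoLA")
        input_task = gr.Textbox(value="auto", max_lines=1, label="Task (can be left blank, will be automatically inferred)")
        btn = gr.Button("Convert to ONNX")
        output = gr.Markdown()  # renders the Markdown string returned by onnx_export
        btn.click(fn=onnx_export, inputs=[input_token, input_model, input_task], outputs=output)

demo.launch()
```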