remove task
app.py CHANGED
@@ -32,16 +32,14 @@ from optimum.intel import (
     OVWeightQuantizationConfig,
 )
 
-HF_TOKEN = os.environ.get("HF_TOKEN")
-
-
 def process_model(
     model_id: str,
     dtype: str,
     private_repo: bool,
-    task: str,
+    # task: str,
     oauth_token: gr.OAuthToken,
 ):
+    task = "auto"
     if oauth_token.token is None:
         raise ValueError("You must be logged in to use this space")
 
@@ -168,17 +166,12 @@ private_repo = gr.Checkbox(
     label="Private Repo",
     info="Create a private repo under your username",
 )
-task = gr.Textbox(
-    value="auto",
-    label="Task : can be left to auto, will be automatically inferred",
-)
 interface = gr.Interface(
     fn=process_model,
     inputs=[
         model_id,
         dtype,
         private_repo,
-        task,
     ],
     outputs=[
         gr.Markdown(label="output"),
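Read back as plain Python, the change drops the task textbox and hardcodes task = "auto" inside process_model, leaving the export task to be inferred automatically. The sketch below shows the resulting wiring under stated assumptions: the model_id and dtype widgets, the dropdown choices, and the function body beyond the lines visible in the diff are illustrative, not the Space's actual code.

```python
import gradio as gr

def process_model(
    model_id: str,
    dtype: str,
    private_repo: bool,
    oauth_token: gr.OAuthToken,
):
    # After this commit the task is fixed rather than user-supplied;
    # downstream export code is left to infer the actual task.
    task = "auto"
    if oauth_token.token is None:
        raise ValueError("You must be logged in to use this space")
    return f"Would export {model_id} (dtype={dtype}, task={task}, private={private_repo})"

# Illustrative input widgets: only private_repo's definition is visible in the diff.
model_id = gr.Textbox(label="Model ID")
dtype = gr.Dropdown(["fp32", "fp16", "int8", "int4"], label="Precision")
private_repo = gr.Checkbox(label="Private Repo", info="Create a private repo under your username")

interface = gr.Interface(
    fn=process_model,
    # Three components map positionally onto the first three parameters;
    # the gr.OAuthToken argument is supplied by Gradio for the logged-in user
    # (the Space must have OAuth enabled), so it is not listed here.
    inputs=[model_id, dtype, private_repo],
    outputs=[gr.Markdown(label="output")],
)
```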