Update app.py
app.py CHANGED

@@ -15,11 +15,11 @@ from apscheduler.schedulers.background import BackgroundScheduler
 CONVERSION_SCRIPT = "convert_lora_to_gguf.py"
 
 def process_model(peft_model_id: str, base_model_id: str, q_method: str, private_repo, oauth_token: gr.OAuthToken | None):
-    if oauth_token is None or oauth_token.token is None:
-        raise gr.Error("You must be logged in to use GGUF-my-lora")
-
     # validate the oauth token
-
+    try:
+        whoami(oauth_token.token)
+    except Exception as e:
+        raise gr.Error("You must be logged in to use GGUF-my-lora")
 
     model_name = peft_model_id.split('/')[-1]
     gguf_output_name = f"{model_name}-{q_method.lower()}.gguf"
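
For context, the net effect of the hunk above is that process_model no longer pre-checks the token for None; it validates the token by calling whoami and converts any failure into a login error. Below is a minimal sketch of that pattern, assuming the app's existing gradio and huggingface_hub imports; the validate_token helper name is hypothetical and not part of app.py.

```python
import gradio as gr
from huggingface_hub import whoami


def validate_token(oauth_token: gr.OAuthToken | None) -> None:
    # Hypothetical helper illustrating the validation pattern from this commit:
    # rather than only checking whether a token is present, call whoami() and
    # treat any failure (missing, expired, or revoked token) the same way.
    try:
        # If oauth_token is None, evaluating oauth_token.token raises an
        # AttributeError inside the try block, so that case is caught too.
        whoami(oauth_token.token)
    except Exception:
        raise gr.Error("You must be logged in to use GGUF-my-lora")
```

Because the attribute access happens inside the try block, the old explicit None check becomes redundant, which is presumably why it could be dropped.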