Spaces: Running on Zero
Yurii Paniv committed
Commit · a9df991 · 1 Parent(s): 084a0ab

Handle zeroGPU
app.py CHANGED
@@ -47,11 +47,9 @@ if getenv("HF_API_TOKEN") is not None:
     #log_queue = Queue()
     #t = Thread(target=check_thread, args=(log_queue,))
     #t.start()
-
-
-
-        private=True,
-    )
+
+    # handle zeroGPU
+    log_queue = None
 else:
     print("No HF_API_TOKEN found. Logging is disabled.")
 
@@ -84,15 +82,11 @@ def translate(input_text):
     print(f"{datetime.utcnow()} | Translating: {input_text}")
 
     if getenv("HF_API_TOKEN") is not None:
-
-
-
-
-
-            )
-            logging_callback([input_text])
-        except:
-            print("Error happened while pushing data to HF.")
+        if log_queue is None:
+            log_queue = Queue()
+            t = Thread(target=check_thread, args=(log_queue,))
+            t.start()
+        log_queue.put([input_text])
 
     input_text = f"[INST] {input_text} [/INST]"
     inputs = tokenizer([input_text], return_tensors="pt").to(model.device)