Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -19,18 +19,19 @@ os.makedirs('./temp', exist_ok=True)
 
 print('\n\n\n')
 print('Loading model...')
-cpu_pipe = transformers.pipeline(
-    'text-generation',
-    model='dx2102/llama-midi',
-    torch_dtype='float32',
-    device='cpu',
-)
+# cpu_pipe = transformers.pipeline(
+#     'text-generation',
+#     model='dx2102/llama-midi',
+#     torch_dtype='float32',
+#     device='cpu',
+# )
 gpu_pipe = transformers.pipeline(
     'text-generation',
     model='dx2102/llama-midi',
     torch_dtype='bfloat16',
     device='cuda:0',
 )
+cpu_pipe = gpu_pipe
 # print devices
 print(f"{gpu_pipe.device = }, {gpu_pipe.model.device = }")
 print(f"{cpu_pipe.device = }, {cpu_pipe.model.device = }")
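The net effect of this commit: the float32 CPU pipeline is commented out and cpu_pipe becomes an alias for the bfloat16 GPU pipeline, so any code path that previously selected cpu_pipe now runs on cuda:0, presumably to avoid holding a second full copy of the model in memory on a ZeroGPU Space. A minimal sketch of the resulting state; the prompt string and max_new_tokens value here are hypothetical illustrations, not taken from the Space's code:

import transformers

# Single pipeline on the GPU; the CPU copy is gone, cpu_pipe is just an alias.
gpu_pipe = transformers.pipeline(
    'text-generation',
    model='dx2102/llama-midi',
    torch_dtype='bfloat16',
    device='cuda:0',
)
cpu_pipe = gpu_pipe

# Both names now dispatch to the same model on cuda:0.
# ('Bach' is a hypothetical prompt for illustration only.)
result = cpu_pipe('Bach', max_new_tokens=64)
print(result[0]['generated_text'])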