dx2102 committed on
Commit
99c677a
·
verified ·
1 Parent(s): c6074a4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -8
app.py CHANGED
@@ -19,20 +19,18 @@ os.makedirs('./temp', exist_ok=True)
19
 
20
  print('\n\n\n')
21
  print('Loading model...')
22
- gpu_pipe = transformers.pipeline(
23
- 'text-generation',
24
- model='dx2102/llama-midi',
25
- # revision='c303c108399aba837146e893375849b918f413b3',
26
- torch_dtype='bfloat16',
27
- device='cuda',
28
- )
29
  cpu_pipe = transformers.pipeline(
30
  'text-generation',
31
  model='dx2102/llama-midi',
32
- # revision='c303c108399aba837146e893375849b918f413b3',
33
  torch_dtype='float32',
34
  device='cpu',
35
  )
 
 
 
 
 
 
36
  # print devices
37
  print(f"{gpu_pipe.device = }, {cpu_pipe.device = }")
38
  print(f"{gpu_pipe.model.device = }, {cpu_pipe.model.device = }")
 
19
 
20
  print('\n\n\n')
21
  print('Loading model...')
 
 
 
 
 
 
 
22
  cpu_pipe = transformers.pipeline(
23
  'text-generation',
24
  model='dx2102/llama-midi',
 
25
  torch_dtype='float32',
26
  device='cpu',
27
  )
28
+ gpu_pipe = transformers.pipeline(
29
+ 'text-generation',
30
+ model='dx2102/llama-midi',
31
+ torch_dtype='bfloat16',
32
+ device='cuda',
33
+ )
34
  # print devices
35
  print(f"{gpu_pipe.device = }, {cpu_pipe.device = }")
36
  print(f"{gpu_pipe.model.device = }, {cpu_pipe.model.device = }")