Fix: move inputs to model device in inference example to avoid "same device" error (#3)
Commit 111a23f9284af5056a970f40d890a8e433b41e84
Co-authored-by: Lim Seong Geun <lim4349@users.noreply.huggingface.co>
README.md
CHANGED
@@ -116,6 +116,7 @@ inputs = processor.apply_chat_template(
     return_dict=True,
     return_tensors="pt"
 )
+inputs = inputs.to(model.device)
 
 # Inference: Generation of the output
 generated_ids = model.generate(**inputs, max_new_tokens=128)
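For context, below is a minimal sketch of the full inference snippet with this fix applied. The checkpoint id, model class, and message contents are placeholders for illustration; only the lines shown in the diff above come from the actual README. The point of the change is that the processor returns CPU tensors, while the model may have been placed on a GPU (e.g. via device_map="auto"), so the inputs must be moved to model.device before calling generate().

```python
import torch
from transformers import AutoModelForCausalLM, AutoProcessor

# Placeholder checkpoint; substitute the model this README documents.
# The model class is also an assumption -- use whichever class the README loads.
model_id = "your-org/your-model"

processor = AutoProcessor.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",  # weights may end up on a GPU
)

# Message format may differ depending on the processor's chat template.
messages = [
    {"role": "user", "content": "Describe the weather in one sentence."},
]

inputs = processor.apply_chat_template(
    messages,
    add_generation_prompt=True,
    tokenize=True,
    return_dict=True,
    return_tensors="pt",
)

# The fix from this commit: the processor's tensors live on CPU, so move them
# to the model's device, otherwise generate() raises a "same device" error.
inputs = inputs.to(model.device)

# Inference: Generation of the output
generated_ids = model.generate(**inputs, max_new_tokens=128)
print(processor.batch_decode(generated_ids, skip_special_tokens=True)[0])
```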