Update app.py
app.py CHANGED
@@ -728,10 +728,11 @@ if __name__ == "__main__":
 
     model_path = args.model_path
     filt_invalid = "cut"
-
+    lava_kwargs = {"multimodal": True}
     #model_name = get_model_name_from_path(args.model_path)
     model_name = "pangea_llava_qwen"
-    tokenizer, model, image_processor, context_len = load_pretrained_model(model_path, None, model_name, args.load_8bit, args.load_4bit, **
-
+    tokenizer, model, image_processor, context_len = load_pretrained_model(model_path, None, model_name, args.load_8bit, args.load_4bit, **lava_kwargs)
+    device = (torch.device("cuda") if torch.cuda.is_available() else torch.device("mps") if getattr(torch.backends, "mps", None) and torch.backends.mps.is_available() else torch.device("cpu"))
+    model = model.to(device)
     chat_image_num = 0
     demo.launch()
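For reference, below is a minimal, self-contained sketch of the CUDA -> MPS -> CPU fallback used in the added lines. It assumes only PyTorch is installed and uses a small nn.Linear as a stand-in for the model returned by load_pretrained_model, which comes from the app's own LLaVA/Pangea dependencies; pick_device is a hypothetical helper introduced here for illustration, not part of app.py.

# Sketch of the device-selection fallback from the diff above (assumption:
# only PyTorch is available; the real app moves the loaded Pangea model).
import torch


def pick_device() -> torch.device:
    """Prefer CUDA, then Apple's MPS backend, then fall back to CPU."""
    if torch.cuda.is_available():
        return torch.device("cuda")
    mps = getattr(torch.backends, "mps", None)  # older PyTorch may lack this attribute
    if mps is not None and mps.is_available():
        return torch.device("mps")
    return torch.device("cpu")


if __name__ == "__main__":
    device = pick_device()
    model = torch.nn.Linear(4, 4)   # stand-in for the loaded model
    model = model.to(device)        # same .to(device) move as in the diff
    x = torch.randn(1, 4, device=device)
    print(device, model(x).shape)

The one-line conditional expression in the diff and the helper above are equivalent; factoring the check into a function just makes the fallback order easier to read and test.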