Update app.py
app.py CHANGED

@@ -23,7 +23,7 @@ from transformers.image_utils import load_image
 # Constants for text generation
 MAX_MAX_NEW_TOKENS = 2048
 DEFAULT_MAX_NEW_TOKENS = 1024
-MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "
+MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))

 device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

@@ -37,7 +37,7 @@ model_m = Qwen2_5_VLForConditionalGeneration.from_pretrained(
 ).to("cuda").eval()

 # Load MiMo-VL-7B-RL
-MODEL_ID_X = "XiaomiMiMo/MiMo-VL-7B-
+MODEL_ID_X = "XiaomiMiMo/MiMo-VL-7B-SFT"
 processor_x = AutoProcessor.from_pretrained(MODEL_ID_X, trust_remote_code=True)
 model_x = Qwen2_5_VLForConditionalGeneration.from_pretrained(
     MODEL_ID_X,
@@ -130,7 +130,7 @@ def generate_video(model_name: str, text: str, video_path: str,
     if model_name == "Cosmos-Reason1-7B":
         processor = processor_m
         model = model_m
-    elif model_name == "MiMo-VL-7B-
+    elif model_name == "MiMo-VL-7B-SFT":
         processor = processor_x
         model = model_x
     else:
@@ -232,7 +232,7 @@ with gr.Blocks(css=css, theme="bethecloud/storj_theme") as demo:
         with gr.Column():
             output = gr.Textbox(label="Output", interactive=False)
             model_choice = gr.Dropdown(
-                choices=["Cosmos-Reason1-7B", "MiMo-VL-7B-
+                choices=["Cosmos-Reason1-7B", "MiMo-VL-7B-SFT"],
                 label="Select Model",
                 value="Cosmos-Reason1-7B")
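For reference, below is a minimal sketch of how the model added in this commit can be exercised on its own, assuming the standard Qwen2.5-VL processor/generate flow that the Space's imports suggest. Only the names visible in the diff (MODEL_ID_X, MAX_INPUT_TOKEN_LENGTH, DEFAULT_MAX_NEW_TOKENS, load_image) are taken from app.py; the dtype, image path, prompt, and length check are illustrative assumptions, not lines from the Space.

# Minimal sketch (not copied from the Space): loads the model named in the diff
# and runs one image+text generation with the standard Qwen2.5-VL processor flow.
# The dtype, image path, and prompt below are illustrative assumptions.
import os
import torch
from transformers import AutoProcessor, Qwen2_5_VLForConditionalGeneration
from transformers.image_utils import load_image

MODEL_ID_X = "XiaomiMiMo/MiMo-VL-7B-SFT"
MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))
DEFAULT_MAX_NEW_TOKENS = 1024

processor = AutoProcessor.from_pretrained(MODEL_ID_X, trust_remote_code=True)
model = Qwen2_5_VLForConditionalGeneration.from_pretrained(
    MODEL_ID_X,
    torch_dtype=torch.float16,  # assumption; the Space's dtype is outside this diff
).to("cuda").eval()

image = load_image("example.jpg")  # placeholder path, not from the Space
messages = [{
    "role": "user",
    "content": [
        {"type": "image"},
        {"type": "text", "text": "Describe this image."},
    ],
}]
prompt = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
inputs = processor(text=[prompt], images=[image], return_tensors="pt").to("cuda")

# The diff only shows the MAX_INPUT_TOKEN_LENGTH constant; how the Space enforces
# it is not visible here, so this sketch just checks the prompt length.
if inputs["input_ids"].shape[1] > MAX_INPUT_TOKEN_LENGTH:
    print(f"Prompt exceeds MAX_INPUT_TOKEN_LENGTH ({MAX_INPUT_TOKEN_LENGTH} tokens)")

with torch.inference_mode():
    output_ids = model.generate(**inputs, max_new_tokens=DEFAULT_MAX_NEW_TOKENS)

# Decode only the newly generated tokens.
print(processor.batch_decode(
    output_ids[:, inputs["input_ids"].shape[1]:], skip_special_tokens=True
)[0])

In the Space itself, the same processor and model objects are selected through the "Select Model" dropdown shown in the last hunk; the sketch above only mirrors the constants and identifiers that appear in this diff.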