Update app.py
change order of examples
app.py CHANGED
@@ -28,7 +28,7 @@ def get_model_summary(model_name):
     try:
         # Fetch the config.json file
         config_url = f"https://huggingface.co/{model_name}/raw/main/config.json"
-        headers = {"Authorization": f"Bearer {
+        headers = {"Authorization": f"Bearer {HF_TOKEN}"}
         response = requests.get(config_url, headers=headers)
         response.raise_for_status()
         config = response.json()
@@ -69,15 +69,15 @@ with gr.Blocks() as demo:
     gr.Markdown("### Vision Models")
     vision_examples = gr.Examples(
         examples=[
-            ["microsoft/llava-med-v1.5-mistral-7b"],
             ["llava-hf/llava-v1.6-mistral-7b-hf"],
             ["xtuner/llava-phi-3-mini-hf"],
-            ["xtuner/llava-llama-3-8b-v1_1-transformers"],
+            ["xtuner/llava-llama-3-8b-v1_1-transformers"],
             ["vikhyatk/moondream2"],
             ["openbmb/MiniCPM-Llama3-V-2_5"],
             ["microsoft/Phi-3-vision-128k-instruct"],
             ["google/paligemma-3b-mix-224"],
-            ["HuggingFaceM4/idefics2-8b-chatty"]
+            ["HuggingFaceM4/idefics2-8b-chatty"],
+            ["microsoft/llava-med-v1.5-mistral-7b"]
         ],
         inputs=textbox
     )
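For context, the authentication change in the first hunk amounts to sending a Bearer token when fetching a model's config.json from the Hub. Below is a minimal standalone sketch of that fetch, not the actual app: it assumes HF_TOKEN is supplied as an environment secret (the real Space may define it differently) and leaves out the surrounding Gradio UI.

import os
import requests

# Sketch only: HF_TOKEN is assumed to come from an environment secret.
HF_TOKEN = os.environ.get("HF_TOKEN")

def get_model_summary(model_name):
    # Fetch the config.json file, authenticating when a token is available
    # (required for gated or private repositories).
    config_url = f"https://huggingface.co/{model_name}/raw/main/config.json"
    headers = {"Authorization": f"Bearer {HF_TOKEN}"} if HF_TOKEN else {}
    response = requests.get(config_url, headers=headers)
    response.raise_for_status()
    return response.json()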