update selection
app.py (CHANGED)
@@ -19,25 +19,55 @@ with gr.Blocks(fill_height=True) as demo:
             model_choice = gr.Dropdown(
                 choices=['gpt-4-turbo', 'gpt-4', 'gpt-3.5-turbo'],
                 value='gpt-4-turbo',
-                label="Select Model"
+                label="Select Model",
+                interactive=True
             )
-            gr.load(
+
+            chatgpt_interface = gr.load(
                 name=model_choice.value,
                 src=openai_gradio.registry,
                 accept_token=True
             )
+
+            def update_model(new_model):
+                return gr.load(
+                    name=new_model,
+                    src=openai_gradio.registry,
+                    accept_token=True
+                )
+
+            model_choice.change(
+                fn=update_model,
+                inputs=[model_choice],
+                outputs=[chatgpt_interface]
+            )
     with gr.Tab("Claude"):
         with gr.Row():
             claude_model = gr.Dropdown(
                 choices=['claude-3-sonnet-20240229', 'claude-3-opus-20240229'],
                 value='claude-3-sonnet-20240229',
-                label="Select Model"
+                label="Select Model",
+                interactive=True
             )
-            gr.load(
+
+            claude_interface = gr.load(
                 name=claude_model.value,
                 src=anthropic_gradio.registry,
                 accept_token=True
             )
+
+            def update_claude_model(new_model):
+                return gr.load(
+                    name=new_model,
+                    src=anthropic_gradio.registry,
+                    accept_token=True
+                )
+
+            claude_model.change(
+                fn=update_claude_model,
+                inputs=[claude_model],
+                outputs=[claude_interface]
+            )
     with gr.Tab("Meta Llama-3.2-90B-Vision-Instruct"):
         gr.load(
             name='Llama-3.2-90B-Vision-Instruct',
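
For context, a minimal standalone sketch of the pattern this change introduces for the ChatGPT tab: the dropdown's change event re-runs gr.load() for the newly selected model and routes the result back to the originally loaded interface. The imports, the Tab/Row wrapper, and the launch call are assumed from the rest of app.py and are not part of this hunk; whether swapping a gr.load() block through an event output behaves as intended depends on the Gradio version, so treat this as a sketch, not a verified implementation.

import gradio as gr
import openai_gradio  # assumed installed, as in the rest of app.py

with gr.Blocks(fill_height=True) as demo:
    with gr.Tab("ChatGPT"):
        with gr.Row():
            # Model picker; interactive=True is what the diff adds so the
            # user can actually change the selection at runtime.
            model_choice = gr.Dropdown(
                choices=['gpt-4-turbo', 'gpt-4', 'gpt-3.5-turbo'],
                value='gpt-4-turbo',
                label="Select Model",
                interactive=True
            )

            # Load the interface for the initially selected model.
            chatgpt_interface = gr.load(
                name=model_choice.value,
                src=openai_gradio.registry,
                accept_token=True
            )

            # Reload the interface whenever a different model is picked,
            # mirroring the handler added in the diff above.
            def update_model(new_model):
                return gr.load(
                    name=new_model,
                    src=openai_gradio.registry,
                    accept_token=True
                )

            model_choice.change(
                fn=update_model,
                inputs=[model_choice],
                outputs=[chatgpt_interface]
            )

demo.launch()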