Update ui_components.py
ui_components.py  CHANGED  (+30 -14)
@@ -224,7 +224,7 @@ class UIComponents:
             json_fmt["json_schema"]["strict"] = True
             params["response_format"] = json_fmt
 
-        # tools
+        # tools (OpenAI-style tools are optional; only send tool_choice/tool_prompt if tools are provided)
         tools = None
         if tools_json and tools_json.strip():
             try:
@@ -235,18 +235,17 @@ class UIComponents:
                 return gr.Markdown("❌ Tools must be a JSON array.", visible=True)
             except Exception as e:
                 return gr.Markdown(f"❌ Invalid tools JSON: {e}", visible=True)
+
         if tools is not None:
             params["tools"] = tools
-
-
-
-
-
-
-
-
-        if tool_prompt and tool_prompt.strip():
-            params["tool_prompt"] = tool_prompt.strip()
+        # tool_choice (only valid when tools are present)
+        if tool_choice in ("auto", "none", "required"):
+            params["tool_choice"] = tool_choice
+        elif tool_choice == "function" and tool_function_name:
+            params["tool_choice"] = {"type": "function", "function": {"name": tool_function_name}}
+        # tool_prompt (only meaningful when tools are present)
+        if tool_prompt and tool_prompt.strip():
+            params["tool_prompt"] = tool_prompt.strip()
         self.mcp_client.set_generation_params(params)
         return gr.Markdown("✅ Inference parameters updated.")
 
@@ -603,13 +602,30 @@ class UIComponents:
             outputs=[self.api_status, self.reasoning_group]
         )
 
-        # Connect chat
+        # Connect chat with streaming generator for incremental updates
+        def submit_message_stream(message, history):
+            if message and (message.get("text", "").strip() or message.get("files", [])):
+                converted_history = []
+                for msg in history:
+                    if isinstance(msg, dict):
+                        converted_history.append(ChatMessage(
+                            role=msg.get('role', 'assistant'),
+                            content=msg.get('content', ''),
+                            metadata=msg.get('metadata', None)
+                        ))
+                    else:
+                        converted_history.append(msg)
+                # Delegate to streaming generator in ChatHandler
+                yield from self.chat_handler.process_multimodal_message_stream(message, converted_history)
+                return
+            yield history, gr.MultimodalTextbox(value=None, interactive=False)
+
         chat_submit = self.chat_input.submit(
-
+            submit_message_stream,
             inputs=[self.chat_input, chatbot],
             outputs=[chatbot, self.chat_input]
         )
-
+        # No .then needed; generator yields a final interactive=True state
 
         # Connect server management with proper button state handling
         self.add_server_btn.click(
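A minimal sketch of what the new tool_choice branch puts into params. Only the if/elif logic is taken from the diff above; the get_weather tool, its schema, and the example UI values are invented for illustration and are not part of the commit.

# Hypothetical tool definition; "get_weather" is a made-up example.
tools = [{
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Look up the current weather for a city",
        "parameters": {"type": "object", "properties": {"city": {"type": "string"}}},
    },
}]

params = {"tools": tools}
tool_choice = "function"            # example value from the UI selector
tool_function_name = "get_weather"  # example value from the UI text field

if tool_choice in ("auto", "none", "required"):
    params["tool_choice"] = tool_choice
elif tool_choice == "function" and tool_function_name:
    # Force the model to call one specific function.
    params["tool_choice"] = {"type": "function", "function": {"name": tool_function_name}}

print(params["tool_choice"])
# -> {'type': 'function', 'function': {'name': 'get_weather'}}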
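The submit wiring passes submit_message_stream straight to chat_input.submit and drops any .then() reset step, which only works if ChatHandler.process_multimodal_message_stream is itself a generator whose final yield re-enables the textbox. A rough sketch of that assumed contract follows; it is written as a standalone function with a placeholder response loop, whereas the real method lives on ChatHandler and streams actual model and tool output.

import gradio as gr
from gradio import ChatMessage

def process_multimodal_message_stream(message, history):
    # Assumed yield shape: (updated chat history, MultimodalTextbox update).
    text = (message.get("text") or "").strip()
    history = history + [ChatMessage(role="user", content=text)]
    # Keep the input disabled while the response streams in.
    yield history, gr.MultimodalTextbox(value=None, interactive=False)

    partial = ""
    for chunk in ("Calling tool...", " done.", " Here is the result."):  # placeholder for streamed output
        partial += chunk
        yield history + [ChatMessage(role="assistant", content=partial)], \
            gr.MultimodalTextbox(value=None, interactive=False)

    # Final yield hands the textbox back to the user, so no .then() reset is needed.
    yield history + [ChatMessage(role="assistant", content=partial)], \
        gr.MultimodalTextbox(value=None, interactive=True)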