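"""Gradio chat demo for the dots.llm1.inst model.

The UI is built with modelscope_studio (antd/antdx) components. Chat requests are
routed through an intermediate proxy server (see ProxyClient below) rather than
calling the model API directly, and a small PDF-upload flow summarizes documents
with the same backend. Conversations can optionally be persisted in the browser.
"""
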
import os
import uuid
import json
import gradio as gr
import modelscope_studio.components.antd as antd
import modelscope_studio.components.antdx as antdx
import modelscope_studio.components.base as ms
from openai import OpenAI
import requests
from typing import Generator, Dict, Any
import logging
import time
import pdfplumber
import tempfile

# =========== Configuration
# MODEL NAME
model = os.getenv("MODEL_NAME")
# Proxy server configuration
PROXY_BASE_URL = os.getenv("PROXY_API_BASE", "http://localhost:8000")
PROXY_TIMEOUT = int(os.getenv("PROXY_TIMEOUT", 30))
MAX_RETRIES = int(os.getenv("MAX_RETRIES", 5))
# Save history
save_history = True
# =========== Configuration

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
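

# Lightweight wrappers that mirror the shape of OpenAI streaming chat-completion
# chunks (choices[n].delta.content, finish_reason, ...), so code written against
# the OpenAI client can consume the proxy's responses unchanged.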
class DeltaObject:
    """Simulate OpenAI Delta object"""

    def __init__(self, data: dict):
        self.content = data.get('content')
        self.role = data.get('role')


class ChoiceObject:
    """Simulate OpenAI Choice object"""

    def __init__(self, choice_data: dict):
        delta_data = choice_data.get('delta', {})
        self.delta = DeltaObject(delta_data)
        self.finish_reason = choice_data.get('finish_reason')
        self.index = choice_data.get('index', 0)


class ChunkObject:
    """Simulate OpenAI Chunk object"""

    def __init__(self, chunk_data: dict):
        choices_data = chunk_data.get('choices', [])
        self.choices = [ChoiceObject(choice) for choice in choices_data]
        self.id = chunk_data.get('id', '')
        self.object = chunk_data.get('object', 'chat.completion.chunk')
        self.created = chunk_data.get('created', 0)
        self.model = chunk_data.get('model', '')
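

# The proxy is expected to expose an OpenAI-compatible interface:
#   POST {PROXY_BASE_URL}/chat/completions  -- JSON body with model/messages/stream
#   GET  {PROXY_BASE_URL}/health            -- returns {"status": "healthy"} when ready
# Streaming responses are read as server-sent events, one JSON chunk per line, e.g.
#   data: {"choices": [{"delta": {"content": "Hi"}, "index": 0, "finish_reason": null}]}
#   data: [DONE]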
class ProxyClient:
    """Proxy client for communicating with the intermediate service"""

    def __init__(self, base_url: str, timeout: int = 30):
        self.base_url = base_url.rstrip('/')
        self.timeout = timeout
        self.session = requests.Session()

    def chat_completions_create(self, model: str, messages: list, stream: bool = True, **kwargs):
        """Create chat completion request"""
        url = f"{self.base_url}/chat/completions"
        payload = {
            "model": model,
            "messages": messages,
            "stream": stream,
            **kwargs
        }
        try:
            response = self.session.post(
                url,
                json=payload,
                stream=stream,
                timeout=self.timeout,
                headers={"Content-Type": "application/json"}
            )
            response.raise_for_status()
            if stream:
                return self._parse_stream_response(response)
            else:
                return response.json()
        except requests.exceptions.RequestException as e:
            logger.error(f"Request failed: {str(e)}")
            raise Exception(f"Failed to connect to proxy server: {str(e)}")

    def _parse_stream_response(self, response) -> Generator[ChunkObject, None, None]:
        """Parse streaming response"""
        try:
            response.encoding = 'utf-8'
            for line in response.iter_lines(decode_unicode=True):
                if not line:
                    continue
                line = line.strip()
                if line.startswith('data: '):
                    data = line[6:]
                    if data == '[DONE]':
                        break
                    try:
                        chunk_data = json.loads(data)
                        if 'error' in chunk_data:
                            raise Exception(f"Stream error: {chunk_data.get('detail', chunk_data['error'])}")
                        yield ChunkObject(chunk_data)
                    except json.JSONDecodeError as e:
                        logger.warning(f"Failed to parse JSON: {data}, error: {str(e)}")
                        continue
        except Exception as e:
            logger.error(f"Error parsing stream response: {str(e)}")
            raise

    def health_check(self) -> dict:
        """Health check"""
        try:
            url = f"{self.base_url}/health"
            response = self.session.get(url, timeout=self.timeout)
            response.raise_for_status()
            return response.json()
        except Exception as e:
            logger.error(f"Health check failed: {str(e)}")
            return {"status": "unhealthy", "error": str(e)}


# Initialize proxy client
client = ProxyClient(PROXY_BASE_URL, PROXY_TIMEOUT)
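

# Each attempt below is preceded by a health check; failures back off
# exponentially (1s, 2s, then capped at 4s) before the next retry.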
def chat_with_retry(history_messages, max_retries=MAX_RETRIES):
    """Chat function with retry mechanism"""
    last_exception = None
    for attempt in range(max_retries):
        try:
            logger.info(f"Chat attempt {attempt + 1}/{max_retries}")
            health = client.health_check()
            if health.get("status") != "healthy":
                raise Exception(f"Proxy service unhealthy: {health}")
            response = client.chat_completions_create(
                model=model,
                messages=history_messages,
                stream=True,
                temperature=0.7,
                top_p=0.8
            )
            return response
        except Exception as e:
            last_exception = e
            logger.warning(f"Attempt {attempt + 1} failed: {str(e)}")
            if attempt < max_retries - 1:
                wait_time = min(2 ** attempt, 4)
                logger.info(f"Retrying in {wait_time} seconds...")
                time.sleep(wait_time)
            else:
                logger.error(f"All {max_retries} attempts failed")
                raise last_exception
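

# handle_file_upload is a generator: it first yields the user message plus a
# loading placeholder so the UI updates immediately, then fills the placeholder
# with the summary once the proxy has finished responding.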
def handle_file_upload(file, state_value):
    """Handle PDF file upload and summarization"""
    if file is None:
        yield state_value, state_value["conversations_history"].get(state_value["conversation_id"], [])
        return
    history = state_value["conversations_history"].get(state_value["conversation_id"], [])
    filename = os.path.basename(file.name)
    history.append({
        "role": "user",
        "meta": {},
        "key": str(uuid.uuid4()),
        "content": f"Uploaded PDF: {filename}"
    })
    # Extract the text of every page with pdfplumber
    with pdfplumber.open(file.name) as pdf:
        text = ""
        for page in pdf.pages:
            text += page.extract_text() or ""
    prompt = f"Summarize this document: {text}"
    summary_key = str(uuid.uuid4())
    history.append({
        "role": "assistant",
        "content": "Summarizing the uploaded PDF...",
        "key": summary_key,
        "meta": {"reason_content": ""},
        "loading": True,
    })
    state_value["conversations_history"][state_value["conversation_id"]] = history
    yield state_value, history
    try:
        messages = [{"role": "user", "content": prompt}]
        response = chat_with_retry(messages)
        summary = ""
        for chunk in response:
            if chunk.choices and len(chunk.choices) > 0:
                content = chunk.choices[0].delta.content
                if content:
                    summary += content
        for item in history:
            if item["key"] == summary_key:
                item["content"] = summary
                item["loading"] = False
                item["meta"]["end"] = True
                break
        state_value["last_summary"] = summary
    except Exception as e:
        logger.error(f"PDF summarization failed: {str(e)}")
        for item in history:
            if item["key"] == summary_key:
                item["content"] = "Failed to summarize the PDF."
                item["loading"] = False
                item["meta"]["end"] = True
                item["meta"]["error"] = True
                break
    state_value["conversations_history"][state_value["conversation_id"]] = history
    yield state_value, history


def download_summary(state_value):
    """Generate a downloadable summary file"""
    if state_value["last_summary"]:
        with tempfile.NamedTemporaryFile(delete=False, suffix=".txt") as tmp_file:
            tmp_file.write(state_value["last_summary"].encode('utf-8'))
            return tmp_file.name
    return None


is_modelscope_studio = os.getenv('MODELSCOPE_ENVIRONMENT') == 'studio'


def get_text(text: str, cn_text: str):
    if is_modelscope_studio:
        return cn_text
    return text


logo_img = os.path.join(os.path.dirname(__file__), "rednote_hilab.png")

DEFAULT_PROMPTS = [{
    "category": "🖋 Make a plan",
    "prompts": [
        "Help me with a plan to start a business",
        "Help me with a plan to achieve my goals",
        "Help me with a plan for a successful interview"
    ]
}, {
    "category": "📅 Help me write",
    "prompts": [
        "Help me write a story with a twist ending",
        "Help me write a blog post on mental health",
        "Help me write a letter to my future self"
    ]
}]

DEFAULT_SUGGESTIONS = [{
    "label": 'Make a plan',
    "value": "Make a plan",
    "children": [{
        "label": "Start a business",
        "value": "Help me with a plan to start a business"
    }, {
        "label": "Achieve my goals",
        "value": "Help me with a plan to achieve my goals"
    }, {
        "label": "Successful interview",
        "value": "Help me with a plan for a successful interview"
    }]
}, {
    "label": 'Help me write',
    "value": "Help me write",
    "children": [{
        "label": "Story with a twist ending",
        "value": "Help me write a story with a twist ending"
    }, {
        "label": "Blog post on mental health",
        "value": "Help me write a blog post on mental health"
    }, {
        "label": "Letter to my future self",
        "value": "Help me write a letter to my future self"
    }]
}]

DEFAULT_CONVERSATIONS_HISTORY = [{"role": "placeholder"}]

DEFAULT_LOCALE = 'zh_CN' if is_modelscope_studio else 'en_US'

DEFAULT_THEME = {
    "token": {
        "colorPrimary": "#6A57FF",
    }
}


def format_history(history):
    messages = [{
        "role": "system",
        "content": "You are a helpful assistant.",
    }]
    for item in history:
        if item["role"] == "user":
            messages.append({"role": "user", "content": item["content"]})
        elif item["role"] == "assistant":
            messages.append({"role": "assistant", "content": item["content"]})
    return messages
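

# Gradio_Events groups the UI event handlers. Each handler receives the shared
# state and returns (or yields) a dict mapping components to gr.update(...) so a
# single callback can refresh several components at once.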
class Gradio_Events:

    @staticmethod
    def _submit(state_value):
        history = state_value["conversations_history"][state_value["conversation_id"]]
        history_messages = format_history(history)
        history.append({
            "role": "assistant",
            "content": "",
            "key": str(uuid.uuid4()),
            "meta": {"reason_content": ""},
            "loading": True,
        })
        yield {
            chatbot: gr.update(items=history),
            state: gr.update(value=state_value),
        }
        try:
            response = chat_with_retry(history_messages)
            thought_done = False
            for chunk in response:
                if chunk.choices and len(chunk.choices) > 0:
                    content = chunk.choices[0].delta.content
                else:
                    # Skip keep-alive chunks that carry no choices instead of
                    # aborting the whole stream.
                    continue
                history[-1]["loading"] = False
                if content and not thought_done:
                    thought_done = True
                    history[-1]["content"] = ""
                if content:
                    history[-1]["content"] += content
                yield {
                    chatbot: gr.update(items=history),
                    state: gr.update(value=state_value)
                }
            history[-1]["meta"]["end"] = True
            print("Answer: ", history[-1]["content"])
        except Exception as e:
            history[-1]["loading"] = False
            history[-1]["meta"]["end"] = True
            history[-1]["meta"]["error"] = True
            history[-1]["content"] = "Failed to respond, please try again."
            yield {
                chatbot: gr.update(items=history),
                state: gr.update(value=state_value)
            }
            print('Error: ', e)
            raise e

    @staticmethod
    def submit(sender_value, state_value):
        if not state_value["conversation_id"]:
            random_id = str(uuid.uuid4())
            history = []
            state_value["conversation_id"] = random_id
            state_value["conversations_history"][random_id] = history
            state_value["conversations"].append({
                "label": sender_value,
                "key": random_id
            })
        history = state_value["conversations_history"][state_value["conversation_id"]]
        history.append({
            "role": "user",
            "meta": {},
            "key": str(uuid.uuid4()),
            "content": sender_value
        })
        yield Gradio_Events.preprocess_submit()(state_value)
        try:
            for chunk in Gradio_Events._submit(state_value):
                yield chunk
        except Exception as e:
            raise e
        finally:
            yield Gradio_Events.postprocess_submit(state_value)

    @staticmethod
    def regenerate_message(state_value, e: gr.EventData):
        conversation_key = e._data["component"]["conversationKey"]
        history = state_value["conversations_history"][state_value["conversation_id"]]
        index = -1
        for i, conversation in enumerate(history):
            if conversation["key"] == conversation_key:
                index = i
                break
        if index == -1:
            yield gr.skip()
            return
        history = history[:index]
        state_value["conversations_history"][state_value["conversation_id"]] = history
        yield {
            chatbot: gr.update(items=history),
            state: gr.update(value=state_value)
        }
        yield Gradio_Events.preprocess_submit(clear_input=False)(state_value)
        try:
            for chunk in Gradio_Events._submit(state_value):
                yield chunk
        except Exception as e:
            raise e
        finally:
            yield Gradio_Events.postprocess_submit(state_value)

    @staticmethod
    def preprocess_submit(clear_input=True):

        def preprocess_submit_handler(state_value):
            history = state_value["conversations_history"][state_value["conversation_id"]]
            for conversation in history:
                if "meta" in conversation:
                    conversation["meta"]["disabled"] = True
            return {
                sender: gr.update(value=None, loading=True) if clear_input else gr.update(loading=True),
                conversations: gr.update(
                    active_key=state_value["conversation_id"],
                    items=list(
                        map(
                            lambda item: {
                                **item,
                                "disabled": True if item["key"] != state_value["conversation_id"] else False
                            }, state_value["conversations"]))),
                add_conversation_btn: gr.update(disabled=True),
                clear_btn: gr.update(disabled=True),
                conversation_delete_menu_item: gr.update(disabled=True),
                chatbot: gr.update(items=history),
                state: gr.update(value=state_value),
            }

        return preprocess_submit_handler

    @staticmethod
    def postprocess_submit(state_value):
        history = state_value["conversations_history"][state_value["conversation_id"]]
        for conversation in history:
            if "meta" in conversation:
                conversation["meta"]["disabled"] = False
        return {
            sender: gr.update(loading=False),
            conversation_delete_menu_item: gr.update(disabled=False),
            clear_btn: gr.update(disabled=False),
            conversations: gr.update(items=state_value["conversations"]),
            add_conversation_btn: gr.update(disabled=False),
            chatbot: gr.update(items=history),
            state: gr.update(value=state_value),
        }

    @staticmethod
    def cancel(state_value):
        history = state_value["conversations_history"][state_value["conversation_id"]]
        history[-1]["loading"] = False
        history[-1]["meta"]["end"] = True
        history[-1]["meta"]["canceled"] = True
        return Gradio_Events.postprocess_submit(state_value)

    @staticmethod
    def delete_message(state_value, e: gr.EventData):
        conversation_key = e._data["component"]["conversationKey"]
        history = state_value["conversations_history"][state_value["conversation_id"]]
        history = [item for item in history if item["key"] != conversation_key]
        state_value["conversations_history"][state_value["conversation_id"]] = history
        return gr.update(items=history if len(history) > 0 else DEFAULT_CONVERSATIONS_HISTORY), gr.update(value=state_value)

    @staticmethod
    def edit_message(state_value, e: gr.EventData):
        conversation_key = e._data["component"]["conversationKey"]
        history = state_value["conversations_history"][state_value["conversation_id"]]
        index = -1
        for i, conversation in enumerate(history):
            if conversation["key"] == conversation_key:
                index = i
                break
        if index == -1:
            return gr.skip()
        state_value["editing_message_index"] = index
        text = ''
        if isinstance(history[index]["content"], str):
            text = history[index]["content"]
        else:
            text = history[index]["content"]["text"]
        return gr.update(value=text), gr.update(value=state_value)

    @staticmethod
    def confirm_edit_message(edit_textarea_value, state_value):
        history = state_value["conversations_history"][state_value["conversation_id"]]
        message = history[state_value["editing_message_index"]]
        if isinstance(message["content"], str):
            message["content"] = edit_textarea_value
        else:
            message["content"]["text"] = edit_textarea_value
        return gr.update(items=history), gr.update(value=state_value)

    @staticmethod
    def select_suggestion(sender_value, e: gr.EventData):
        return gr.update(value=sender_value[:-1] + e._data["payload"][0])

    @staticmethod
    def apply_prompt(e: gr.EventData):
        return gr.update(value=e._data["payload"][0]["data"]["description"])

    @staticmethod
    def new_chat(state_value):
        if not state_value["conversation_id"]:
            return gr.skip()
        state_value["conversation_id"] = ""
        return gr.update(active_key=state_value["conversation_id"]), gr.update(items=DEFAULT_CONVERSATIONS_HISTORY), gr.update(value=state_value)

    @staticmethod
    def select_conversation(state_value, e: gr.EventData):
        active_key = e._data["payload"][0]
        if state_value["conversation_id"] == active_key or (active_key not in state_value["conversations_history"]):
            return gr.skip()
        state_value["conversation_id"] = active_key
        return gr.update(active_key=active_key), gr.update(items=state_value["conversations_history"][active_key]), gr.update(value=state_value)

    @staticmethod
    def click_conversation_menu(state_value, e: gr.EventData):
        conversation_id = e._data["payload"][0]["key"]
        operation = e._data["payload"][1]["key"]
        if operation == "delete":
            del state_value["conversations_history"][conversation_id]
            state_value["conversations"] = [item for item in state_value["conversations"] if item["key"] != conversation_id]
            if state_value["conversation_id"] == conversation_id:
                state_value["conversation_id"] = ""
                return gr.update(items=state_value["conversations"], active_key=state_value["conversation_id"]), gr.update(items=DEFAULT_CONVERSATIONS_HISTORY), gr.update(value=state_value)
            else:
                return gr.update(items=state_value["conversations"]), gr.skip(), gr.update(value=state_value)
        return gr.skip()

    @staticmethod
    def clear_conversation_history(state_value):
        if not state_value["conversation_id"]:
            return gr.skip()
        state_value["conversations_history"][state_value["conversation_id"]] = []
        return gr.update(items=DEFAULT_CONVERSATIONS_HISTORY), gr.update(value=state_value)

    @staticmethod
    def close_modal():
        return gr.update(open=False)

    @staticmethod
    def open_modal():
        return gr.update(open=True)

    @staticmethod
    def update_browser_state(state_value):
        return gr.update(value=dict(
            conversations=state_value["conversations"],
            conversations_history=state_value["conversations_history"]))

    @staticmethod
    def apply_browser_state(browser_state_value, state_value):
        state_value["conversations"] = browser_state_value["conversations"]
        state_value["conversations_history"] = browser_state_value["conversations_history"]
        return gr.update(items=browser_state_value["conversations"]), gr.update(value=state_value)
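

# Page CSS: full-height two-column layout; per-message action footers stay hidden
# until the message is hovered (or is the last message in the list).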
css = """
.gradio-container {
  padding: 0 !important;
}

.gradio-container > main.fillable {
  padding: 0 !important;
}

#chatbot {
  height: calc(100vh - 21px - 16px);
}

#chatbot .chatbot-conversations {
  height: 100%;
  background-color: var(--ms-gr-ant-color-bg-layout);
}

#chatbot .chatbot-conversations .chatbot-conversations-list {
  padding-left: 0;
  padding-right: 0;
}

#chatbot .chatbot-chat {
  padding: 32px;
  height: 100%;
}

@media (max-width: 768px) {
  #chatbot .chatbot-chat {
    padding: 0;
  }
}

#chatbot .chatbot-chat .chatbot-chat-messages {
  flex: 1;
}

#chatbot .chatbot-chat .chatbot-chat-messages .chatbot-chat-message .chatbot-chat-message-footer {
  visibility: hidden;
  opacity: 0;
  transition: opacity 0.2s;
}

#chatbot .chatbot-chat .chatbot-chat-messages .chatbot-chat-message:last-child .chatbot-chat-message-footer {
  visibility: visible;
  opacity: 1;
}

#chatbot .chatbot-chat .chatbot-chat-messages .chatbot-chat-message:hover .chatbot-chat-message-footer {
  visibility: visible;
  opacity: 1;
}
"""


def logo():
    with antd.Typography.Title(level=1, elem_style=dict(fontSize=24, padding=8, margin=0)):
        with antd.Flex(align="center", gap="small", justify="center"):
            antd.Image(logo_img, preview=False, alt="logo", width=24, height=24)
            ms.Span("dots.llm1.inst")

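
# UI definition: the left column holds the conversation list, the right column
# holds the message list, the sender input with "/" suggestions, and the PDF
# upload/summary controls; an antd Modal is used for editing messages.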
with gr.Blocks(css=css, fill_width=True) as demo:
    state = gr.State({
        "conversations_history": {},
        "conversations": [],
        "conversation_id": "",
        "editing_message_index": -1,
        "last_summary": ""
    })

    with ms.Application(), antdx.XProvider(theme=DEFAULT_THEME, locale=DEFAULT_LOCALE), ms.AutoLoading():
        with antd.Row(gutter=[20, 20], wrap=False, elem_id="chatbot"):
            # Left column: conversation management
            with antd.Col(md=dict(flex="0 0 260px", span=24, order=0),
                          span=0,
                          order=1,
                          elem_classes="chatbot-conversations",
                          elem_style=dict(maxWidth="260px", minWidth="260px", overflow="hidden")):
                with antd.Flex(vertical=True, gap="small",
                               elem_style=dict(height="100%", width="100%", minWidth="0")):
                    logo()

                    # "New Conversation" button
                    with antd.Button(value=None, color="primary", variant="filled", block=True,
                                     elem_style=dict(maxWidth="100%")) as add_conversation_btn:
                        ms.Text(get_text("New Conversation", "新建对话"))
                        with ms.Slot("icon"):
                            antd.Icon("PlusOutlined")

                    # Conversation list with a per-item "Delete" menu
                    with antdx.Conversations(elem_classes="chatbot-conversations-list",
                                             elem_style=dict(width="100%", minWidth="0", overflow="hidden", flex="1")) as conversations:
                        with ms.Slot('menu.items'):
                            with antd.Menu.Item(label="Delete", key="delete", danger=True) as conversation_delete_menu_item:
                                with ms.Slot("icon"):
                                    antd.Icon("DeleteOutlined")

            # Right column: chat area
            with antd.Col(flex=1, elem_style=dict(height="100%")):
                with antd.Flex(vertical=True, gap="middle", elem_classes="chatbot-chat"):
                    # Message list
                    with antdx.Bubble.List(items=DEFAULT_CONVERSATIONS_HISTORY,
                                           elem_classes="chatbot-chat-messages") as chatbot:
                        with ms.Slot("roles"):
                            # Placeholder role: welcome screen and prompt suggestions
                            with antdx.Bubble.List.Role(role="placeholder",
                                                        styles=dict(content=dict(width="100%")),
                                                        variant="borderless"):
                                with ms.Slot("messageRender"):
                                    with antd.Space(direction="vertical", size=16, elem_style=dict(width="100%")):
                                        with antdx.Welcome(styles=dict(icon=dict(flexShrink=0)),
                                                           variant="borderless",
                                                           title=get_text("Hello, I'm dots.", "你好,我是 dots."),
                                                           description=get_text("You can type text to get started.", "你可以输入文本开始对话。")):
                                            with ms.Slot("icon"):
                                                antd.Image(logo_img, preview=False)
                                        with antdx.Prompts(title=get_text("How can I help you today?", "有什么我能帮助你的吗?"),
                                                           styles={"list": {"width": '100%'}, "item": {"flex": 1}}) as prompts:
                                            for item in DEFAULT_PROMPTS:
                                                with antdx.Prompts.Item(label=item["category"]):
                                                    for prompt in item["prompts"]:
                                                        antdx.Prompts.Item(description=prompt)

                            # User role: markdown content with copy/edit/delete actions
                            with antdx.Bubble.List.Role(role="user",
                                                        placement="end",
                                                        elem_classes="chatbot-chat-message",
                                                        class_names=dict(footer="chatbot-chat-message-footer"),
                                                        styles=dict(content=dict(maxWidth="100%", overflow='auto'))):
                                with ms.Slot("messageRender", params_mapping="(content) => content"):
                                    ms.Markdown()
                                with ms.Slot("footer", params_mapping="""(bubble) => {
  return {
    copy_btn: { copyable: { text: typeof bubble.content === 'string' ? bubble.content : bubble.content?.text, tooltips: false } },
    edit_btn: { conversationKey: bubble.key, disabled: bubble.meta.disabled },
    delete_btn: { conversationKey: bubble.key, disabled: bubble.meta.disabled },
  };
}"""):
                                    with antd.Typography.Text(copyable=dict(tooltips=False), as_item="copy_btn"):
                                        with ms.Slot("copyable.icon"):
                                            with antd.Button(value=None, size="small", color="default", variant="text"):
                                                with ms.Slot("icon"):
                                                    antd.Icon("CopyOutlined")
                                            with antd.Button(value=None, size="small", color="default", variant="text"):
                                                with ms.Slot("icon"):
                                                    antd.Icon("CheckOutlined")
                                    with antd.Button(value=None, size="small", color="default", variant="text",
                                                     as_item="edit_btn") as user_edit_btn:
                                        with ms.Slot("icon"):
                                            antd.Icon("EditOutlined")
                                    with antd.Popconfirm(title="Delete the message",
                                                         description="Are you sure to delete this message?",
                                                         ok_button_props=dict(danger=True),
                                                         as_item="delete_btn") as user_delete_popconfirm:
                                        with antd.Button(value=None, size="small", color="default", variant="text",
                                                         as_item="delete_btn"):
                                            with ms.Slot("icon"):
                                                antd.Icon("DeleteOutlined")

                            # Assistant role: streamed answer with copy/regenerate/edit/delete actions
                            with antdx.Bubble.List.Role(role="assistant",
                                                        placement="start",
                                                        elem_classes="chatbot-chat-message",
                                                        class_names=dict(footer="chatbot-chat-message-footer"),
                                                        styles=dict(content=dict(maxWidth="100%", overflow='auto'))):
                                with ms.Slot("avatar"):
                                    antd.Avatar(os.path.join(os.path.dirname(__file__), "rednote_hilab.png"))
                                with ms.Slot("messageRender", params_mapping="""(content, bubble) => {
  const has_error = bubble?.meta?.error
  return {
    answer: { value: content },
    canceled: bubble.meta?.canceled ? undefined : { style: { display: 'none' } }
  }
}"""):
                                    ms.Markdown(as_item="answer", elem_classes="answer-content")
                                    antd.Divider(as_item="canceled")
                                    antd.Typography.Text(get_text("Chat completion paused.", "聊天已暂停。"),
                                                         as_item="canceled",
                                                         type="warning")
                                with ms.Slot("footer", params_mapping="""(bubble) => {
  if (bubble?.meta?.end) {
    return {
      copy_btn: { copyable: { text: bubble.content, tooltips: false } },
      regenerate_btn: { conversationKey: bubble.key, disabled: bubble.meta.disabled },
      delete_btn: { conversationKey: bubble.key, disabled: bubble.meta.disabled },
      edit_btn: { conversationKey: bubble.key, disabled: bubble.meta.disabled },
    };
  }
  return { actions_container: { style: { display: 'none' } } };
}"""):
                                    with ms.Div(as_item="actions_container"):
                                        with antd.Typography.Text(copyable=dict(tooltips=False), as_item="copy_btn"):
                                            with ms.Slot("copyable.icon"):
                                                with antd.Button(value=None, size="small", color="default", variant="text"):
                                                    with ms.Slot("icon"):
                                                        antd.Icon("CopyOutlined")
                                                with antd.Button(value=None, size="small", color="default", variant="text"):
                                                    with ms.Slot("icon"):
                                                        antd.Icon("CheckOutlined")
                                        with antd.Popconfirm(title=get_text("Regenerate the message", "重新生成消息"),
                                                             description=get_text("Regenerate the message will also delete all subsequent messages.", "重新生成消息将会删除所有的后续消息。"),
                                                             ok_button_props=dict(danger=True),
                                                             as_item="regenerate_btn") as chatbot_regenerate_popconfirm:
                                            with antd.Button(value=None, size="small", color="default", variant="text",
                                                             as_item="regenerate_btn"):
                                                with ms.Slot("icon"):
                                                    antd.Icon("SyncOutlined")
                                        with antd.Button(value=None, size="small", color="default", variant="text",
                                                         as_item="edit_btn") as chatbot_edit_btn:
                                            with ms.Slot("icon"):
                                                antd.Icon("EditOutlined")
                                        with antd.Popconfirm(title=get_text("Delete the message", "删除消息"),
                                                             description=get_text("Are you sure to delete this message?", "确定要删除这条消息吗?"),
                                                             ok_button_props=dict(danger=True),
                                                             as_item="delete_btn") as chatbot_delete_popconfirm:
                                            with antd.Button(value=None, size="small", color="default", variant="text",
                                                             as_item="delete_btn"):
                                                with ms.Slot("icon"):
                                                    antd.Icon("DeleteOutlined")

                    # Input box: typing "/" opens the suggestion panel
                    with antdx.Suggestion(items=DEFAULT_SUGGESTIONS,
                                          should_trigger="""(e, { onTrigger, onKeyDown }) => {
  switch(e.key) {
    case '/':
      onTrigger()
      break
    case 'ArrowRight':
    case 'ArrowLeft':
    case 'ArrowUp':
    case 'ArrowDown':
      break;
    default:
      onTrigger(false)
  }
  onKeyDown(e)
}""") as suggestion:
                        with ms.Slot("children"):
                            with antdx.Sender(placeholder=get_text("Enter / to get suggestions", "输入 / 获取建议")) as sender:
                                with ms.Slot("prefix"):
                                    with antd.Tooltip(title=get_text("Clear Conversation History", "清空对话历史")):
                                        with antd.Button(value=None, type="text") as clear_btn:
                                            with ms.Slot("icon"):
                                                antd.Icon("ClearOutlined")

                    # PDF upload and summary download
                    file_upload = gr.File(label="Upload PDF for summarization", file_types=[".pdf"])
                    with gr.Row():
                        download_btn = gr.Button("Download Last Summary")
                        summary_file = gr.File(label="Summary File", interactive=False)

        # Modal for editing an existing message
        with antd.Modal(title=get_text("Edit Message", "编辑消息"), open=False, centered=True, width="60%") as edit_modal:
            edit_textarea = antd.Input.Textarea(auto_size=dict(minRows=2, maxRows=6),
                                                elem_style=dict(width="100%"))
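
    # Event wiring: connect UI components to the Gradio_Events handlers. When
    # save_history is enabled, conversations are mirrored into gr.BrowserState
    # so they survive a page reload.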
    if save_history:
        browser_state = gr.BrowserState(
            {"conversations_history": {}, "conversations": []},
            storage_key="dots_chatbot_storage")
        state.change(fn=Gradio_Events.update_browser_state, inputs=[state], outputs=[browser_state])
        demo.load(fn=Gradio_Events.apply_browser_state, inputs=[browser_state, state], outputs=[conversations, state])

    add_conversation_btn.click(fn=Gradio_Events.new_chat, inputs=[state], outputs=[conversations, chatbot, state])
    conversations.active_change(fn=Gradio_Events.select_conversation, inputs=[state], outputs=[conversations, chatbot, state])
    conversations.menu_click(fn=Gradio_Events.click_conversation_menu, inputs=[state], outputs=[conversations, chatbot, state])
    prompts.item_click(fn=Gradio_Events.apply_prompt, outputs=[sender])
    clear_btn.click(fn=Gradio_Events.clear_conversation_history, inputs=[state], outputs=[chatbot, state])
    suggestion.select(fn=Gradio_Events.select_suggestion, inputs=[sender], outputs=[sender])

    gr.on(triggers=[user_edit_btn.click, chatbot_edit_btn.click],
          fn=Gradio_Events.edit_message,
          inputs=[state],
          outputs=[edit_textarea, state]).then(fn=Gradio_Events.open_modal, outputs=[edit_modal])
    edit_modal.ok(fn=Gradio_Events.confirm_edit_message,
                  inputs=[edit_textarea, state],
                  outputs=[chatbot, state]).then(fn=Gradio_Events.close_modal, outputs=[edit_modal])
    edit_modal.cancel(fn=Gradio_Events.close_modal, outputs=[edit_modal])
    gr.on(triggers=[chatbot_delete_popconfirm.confirm, user_delete_popconfirm.confirm],
          fn=Gradio_Events.delete_message,
          inputs=[state],
          outputs=[chatbot, state])

    regenerating_event = chatbot_regenerate_popconfirm.confirm(
        fn=Gradio_Events.regenerate_message,
        inputs=[state],
        outputs=[sender, clear_btn, conversation_delete_menu_item, add_conversation_btn, conversations, chatbot, state])
    submit_event = sender.submit(
        fn=Gradio_Events.submit,
        inputs=[sender, state],
        outputs=[sender, clear_btn, conversation_delete_menu_item, add_conversation_btn, conversations, chatbot, state])
    sender.cancel(fn=None, cancels=[submit_event, regenerating_event])
    sender.cancel(fn=Gradio_Events.cancel,
                  inputs=[state],
                  outputs=[sender, conversation_delete_menu_item, clear_btn, conversations, add_conversation_btn, chatbot, state])

    file_upload.change(fn=handle_file_upload, inputs=[file_upload, state], outputs=[state, chatbot])
    download_btn.click(fn=download_summary, inputs=[state], outputs=[summary_file])


if __name__ == "__main__":
    demo.queue(default_concurrency_limit=200).launch(ssr_mode=False, max_threads=200)