Update Gradio app with multiple files
app.py CHANGED

@@ -114,13 +114,13 @@ def process_chat_message(
 
     return response
 
-def chat_fn(message: Dict[str, Any], history: List[List[Any]]) -> Tuple[str, List[List[Any]]]:
+def chat_fn(message: Dict[str, Any], history: List[Dict[str, Any]]) -> Tuple[str, List[Dict[str, Any]]]:
     """
     Main chat function that processes user input and returns response.
 
     Args:
         message: Dictionary containing text and optional files
-        history: Chat history
+        history: Chat history in messages format
 
     Returns:
         Empty string and updated history
@@ -144,14 +144,11 @@ def chat_fn(message: Dict[str, Any], history: List[List[Any]]) -> Tuple[str, List[List[Any]]]:
 
     # Convert history to format expected by model
     model_history = []
-    for
-        if
-            model_history.append({"role": "user", "content":
-        elif
-            model_history.append({"role": "
-
-        if assistant_msg:
-            model_history.append({"role": "assistant", "content": assistant_msg})
+    for msg in history:
+        if msg.get("role") == "user":
+            model_history.append({"role": "user", "content": msg.get("content", "")})
+        elif msg.get("role") == "assistant":
+            model_history.append({"role": "assistant", "content": msg.get("content", "")})
 
     # Get response from model
     try:
@@ -159,14 +156,15 @@ def chat_fn(message: Dict[str, Any], history: List[List[Any]]) -> Tuple[str, List[List[Any]]]:
     except Exception as e:
         response = f"Sorry, I encountered an error: {str(e)}"
 
-    # Update history
+    # Update history with proper message format
     if image is not None:
-        #
-
+        # Include image indicator in the content
+        user_content = f"{text}\n[Image uploaded]" if text else "[Image uploaded]"
     else:
-
+        user_content = text
 
-    history.append(
+    history.append({"role": "user", "content": user_content})
+    history.append({"role": "assistant", "content": response})
 
     return "", history
 
@@ -175,27 +173,39 @@ def retry_fn(history: List[Dict[str, Any]]) -> Tuple[str, List[Dict[str, Any]]]:
     if not history or len(history) < 2:
         return "", history
 
-    # Remove last assistant response
-
+    # Remove last assistant response
+    history = history[:-1]
+
+    # Get the last user message
+    last_user_msg = history[-1] if history else None
+    if not last_user_msg:
+        return "", history
+
+    # Remove the last user message too (we'll re-add it with new response)
     history = history[:-1]
 
     # Recreate the message dict
-
-
+    user_content = last_user_msg.get("content", "")
+    # Extract text without image indicator
+    if "[Image uploaded]" in user_content:
+        text = user_content.replace("\n[Image uploaded]", "").replace("[Image uploaded]", "")
     else:
-
+        text = user_content
+
+    message = {"text": text}
 
     return chat_fn(message, history)
 
 def undo_fn(history: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
-    """Undo the last message."""
-    if history:
-
-
+    """Undo the last message pair (user + assistant)."""
+    if history and len(history) >= 2:
+        # Remove last user and assistant messages
+        return history[:-2]
+    return []
 
-def clear_fn() -> Tuple[
+def clear_fn() -> Tuple[str, List]:
     """Clear the chat."""
-    return
+    return "", []
 
 # Create the Gradio interface
 with gr.Blocks(theme=gr.themes.Soft(), fill_height=True) as demo:
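The substance of the commit is the switch from the old pair-based history (List[List[Any]]) to the messages format, where every turn is a {"role": ..., "content": ...} dict. The snippet below is not part of the commit; it is a minimal sketch (the helper name pairs_to_messages is made up for illustration) of how an old-style history maps onto the format the updated chat_fn and undo_fn expect.

from typing import Any, Dict, List

def pairs_to_messages(pairs: List[List[Any]]) -> List[Dict[str, Any]]:
    # Hypothetical helper, not in app.py: convert the old
    # [user, assistant] pair history into role/content messages.
    messages: List[Dict[str, Any]] = []
    for user_msg, assistant_msg in pairs:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    return messages

old_history = [["Hi", "Hello! How can I help?"]]
print(pairs_to_messages(old_history))
# [{'role': 'user', 'content': 'Hi'},
#  {'role': 'assistant', 'content': 'Hello! How can I help?'}]

This is the same shape the rewritten loop in chat_fn walks when it builds model_history.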
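The last hunk stops right where the Blocks UI begins, so the commit does not show how these handlers are attached to components. As a rough sketch only (component names such as chatbot and msg_box, the button layout, and the output wiring are assumptions rather than the app's actual code, and it assumes the functions defined above in app.py are in scope), the handlers could be wired roughly like this:

import gradio as gr

with gr.Blocks(theme=gr.themes.Soft(), fill_height=True) as demo:
    # type="messages" keeps the Chatbot value in the same role/content
    # format that chat_fn now appends to.
    chatbot = gr.Chatbot(type="messages")
    msg_box = gr.MultimodalTextbox(placeholder="Type a message or attach an image...")
    with gr.Row():
        retry_btn = gr.Button("Retry")
        undo_btn = gr.Button("Undo")
        clear_btn = gr.Button("Clear")

    # chat_fn returns ("", history): clear the input box, update the chat.
    msg_box.submit(chat_fn, inputs=[msg_box, chatbot], outputs=[msg_box, chatbot])
    retry_btn.click(retry_fn, inputs=chatbot, outputs=[msg_box, chatbot])
    undo_btn.click(undo_fn, inputs=chatbot, outputs=chatbot)
    clear_btn.click(clear_fn, inputs=None, outputs=[msg_box, chatbot])

demo.launch()

Whatever the real layout looks like, the key constraint is that the Chatbot runs in messages mode so its value matches the dicts that chat_fn, retry_fn, and undo_fn pass around.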