Spaces: Running on Zero

Joseph Pollack committed: adds examples and model loading

app.py CHANGED
@@ -61,8 +61,8 @@ class LOperatorDemo:
 
             self.is_loaded = True
             load_time = time.time() - start_time
-            logger.info("
-            return "
+            logger.info(f"Model loaded successfully in {load_time:.1f} seconds")
+            return f"✅ Model loaded successfully in {load_time:.1f} seconds"
 
         except Exception as e:
             logger.error(f"Error loading model: {str(e)}")
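For reference, the new status lines follow a simple time-and-report pattern: record a start time, run the load, and interpolate the elapsed seconds with `:.1f`. A tiny self-contained sketch of that pattern (not part of the commit; the success/failure markers are assumed emoji):

```python
import time

def timed_load(load_fn):
    """Run load_fn and return a human-readable status string with the elapsed time."""
    start_time = time.time()
    try:
        load_fn()
        load_time = time.time() - start_time
        return f"✅ Model loaded successfully in {load_time:.1f} seconds"
    except Exception as e:
        return f"❌ Error loading model: {e}"
```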
@@ -170,72 +170,60 @@ class LOperatorDemo:
 # Initialize demo
 demo_instance = LOperatorDemo()
 
-def 
-    """Load model 
-    import signal
-    import time
-
-    def timeout_handler(signum, frame):
-        raise TimeoutError("Model loading timed out")
-
-    # Set up the signal handler for timeout
-    old_handler = signal.signal(signal.SIGALRM, timeout_handler)
-    signal.alarm(timeout_seconds)
-
+def load_model():
+    """Load model normally"""
     try:
-        logger.info("Loading L-Operator model 
+        logger.info("Loading L-Operator model...")
         result = demo_instance.load_model()
         logger.info(f"Model loading result: {result}")
         return result
-    except TimeoutError:
-        logger.error("Model loading timed out - this may be due to network issues or large model size")
-        return "❌ Model loading timed out. Please try again or check your internet connection."
     except Exception as e:
         logger.error(f"Error loading model: {str(e)}")
         return f"❌ Error loading model: {str(e)}"
-    finally:
-        # Restore the original signal handler
-        signal.alarm(0)
-        signal.signal(signal.SIGALRM, old_handler)
 
 # Load example episodes (lazy loading to avoid startup timeout)
 def load_example_episodes():
-    """Load example episodes from the extracted data - 
+    """Load example episodes from the extracted data - properly load images for Gradio"""
     examples = []
 
     try:
-        # Load episode metadata
-        episodes_data = []
+        # Load episode metadata and images
        episode_dirs = ["episode_13", "episode_53", "episode_73"]
 
         for episode_dir in episode_dirs:
             try:
                 metadata_path = f"extracted_episodes_duckdb/{episode_dir}/metadata.json"
-
-
-
+                image_path = f"extracted_episodes_duckdb/{episode_dir}/screenshots/screenshot_1.png"
+
+                # Check if both files exist
+                if os.path.exists(metadata_path) and os.path.exists(image_path):
+                    with open(metadata_path, "r") as f:
+                        metadata = json.load(f)
+
+                    # Load the image using PIL
+                    image = Image.open(image_path)
+
+                    # Ensure image is in RGB mode
+                    if image.mode != "RGB":
+                        image = image.convert("RGB")
+
+                    episode_num = episode_dir.split('_')[1]
+                    goal_text = metadata.get('goal', f'Episode {episode_num} example')
+
+                    examples.append([
+                        image,  # Use PIL Image object instead of file path
+                        f"Episode {episode_num}: {goal_text[:50]}..."
+                    ])
+
             except Exception as e:
-                logger.warning(f"Could not load 
+                logger.warning(f"Could not load example for {episode_dir}: {str(e)}")
                 continue
 
-        # Create examples with simple path checks (no PIL validation)
-        for i, metadata in enumerate(episodes_data):
-            episode_num = ["13", "53", "73"][i]
-            image_path = f"extracted_episodes_duckdb/episode_{episode_num}/screenshots/screenshot_1.png"
-
-            # Simple file existence check instead of PIL validation
-            if os.path.exists(image_path):
-                goal_text = metadata.get('goal', f'Episode {episode_num} example')
-                examples.append([
-                    image_path,
-                    f"Episode {episode_num}: {goal_text[:50]}..."
-                ])
-
     except Exception as e:
         logger.error(f"Error loading examples: {str(e)}")
         examples = []
 
-    logger.info(f"Loaded {len(examples)} examples 
+    logger.info(f"Loaded {len(examples)} examples with proper image loading")
     return examples
 
 # Create Gradio interface
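The new loader boils down to: read each episode's metadata.json, open the first screenshot with PIL, force RGB, and pair the image object with a short goal string. A minimal standalone sketch of that pattern, assuming only the paths and the 'goal' field shown in the hunk above (the helper name and default arguments here are illustrative, not the app's actual API):

```python
import json
import os
from PIL import Image

def build_examples(base_dir="extracted_episodes_duckdb",
                   episode_dirs=("episode_13", "episode_53", "episode_73")):
    """Sketch of the example-loading pattern used in load_example_episodes."""
    examples = []
    for episode_dir in episode_dirs:
        metadata_path = os.path.join(base_dir, episode_dir, "metadata.json")
        image_path = os.path.join(base_dir, episode_dir, "screenshots", "screenshot_1.png")
        if not (os.path.exists(metadata_path) and os.path.exists(image_path)):
            continue
        with open(metadata_path, "r") as f:
            metadata = json.load(f)
        image = Image.open(image_path)
        if image.mode != "RGB":
            image = image.convert("RGB")  # hand Gradio a plain RGB PIL image, not a file path
        episode_num = episode_dir.split("_")[1]
        goal = metadata.get("goal", f"Episode {episode_num} example")
        examples.append([image, f"Episode {episode_num}: {goal[:50]}..."])
    return examples
```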
@@ -321,12 +309,19 @@ def create_demo():
 
         with gr.Column(scale=2):
             gr.Markdown("### 💬 Chat Interface")
+            # Load examples with error handling
+            try:
+                examples = load_example_episodes()
+            except Exception as e:
+                logger.warning(f"Failed to load examples: {str(e)}")
+                examples = []
+
             chat_interface = gr.ChatInterface(
                 fn=demo_instance.chat_with_model,
                 additional_inputs=[image_input],
                 title="L-Operator Chat",
                 description="Chat with L-Operator using screenshots and text instructions",
-                examples=
+                examples=examples,
                 type="messages",
                 cache_examples=False
             )
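The important change here is that example loading now happens at UI-build time inside a try/except, so a missing or corrupt episode file degrades to an empty examples list instead of breaking app startup. A hedged sketch of that wiring with a dummy chat handler standing in for demo_instance.chat_with_model (whose signature is not shown in this diff); note that the exact row format gr.ChatInterface expects for examples combined with additional_inputs depends on the installed Gradio version, so the [image, text] order from the hunk above should be verified against it:

```python
import logging
import gradio as gr

logger = logging.getLogger(__name__)

def chat_fn(message, history, image=None):
    """Placeholder handler; the real app delegates to demo_instance.chat_with_model."""
    return "stub response"

def build_ui(load_examples):
    # Build the example list defensively so a bad episode file never breaks UI construction.
    try:
        examples = load_examples()
    except Exception as e:
        logger.warning(f"Failed to load examples: {str(e)}")
        examples = []

    with gr.Blocks() as demo:
        image_input = gr.Image(type="pil", label="Device Screenshot")
        gr.ChatInterface(
            fn=chat_fn,
            additional_inputs=[image_input],
            examples=examples or None,  # assumption: pass None when nothing loaded
            type="messages",
            cache_examples=False,
        )
    return demo
```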
@@ -355,11 +350,11 @@ def create_demo():
         except:
             return {"raw_response": response}
 
-    # Update model status on page load
+    # Update model status on page load
     def update_model_status():
         if not demo_instance.is_loaded:
-            logger.info("Loading model on Gradio startup 
-            result = 
+            logger.info("Loading model on Gradio startup...")
+            result = load_model()
             logger.info(f"Model loading result: {result}")
             return result
 
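The status callback now delegates to the plain load_model() helper and is guarded by demo_instance.is_loaded, so the model is loaded lazily and only once. A minimal sketch of that load-once pattern (the LOperatorDemo internals are not part of this diff, so the stub class and the "already loaded" return value are purely illustrative):

```python
import logging
import time

logger = logging.getLogger(__name__)

class StubDemo:
    """Illustrative stand-in for LOperatorDemo; only the is_loaded flag matters here."""
    def __init__(self):
        self.is_loaded = False

    def load_model(self):
        start = time.time()
        time.sleep(0.1)  # pretend to download/initialize weights
        self.is_loaded = True
        return f"✅ Model loaded successfully in {time.time() - start:.1f} seconds"

demo_instance = StubDemo()

def update_model_status():
    # Lazy load: only the first call pays the cost, later calls are no-ops.
    if not demo_instance.is_loaded:
        logger.info("Loading model on Gradio startup...")
        result = demo_instance.load_model()
        logger.info(f"Model loading result: {result}")
        return result
    return "✅ Model already loaded"
```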
@@ -380,15 +375,8 @@ def create_demo():
         outputs=model_status
     )
 
-    #
-
-        return image
-
-    image_input.change(
-        fn=update_chat_image,
-        inputs=[image_input],
-        outputs=[chat_interface.chatbot]
-    )
+    # Note: The chat interface will automatically handle image updates
+    # No need for manual image change handling
 
     gr.Markdown("""
     ---
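One plausible reason the signal-based timeout wrapper was dropped in the earlier hunk: signal.alarm/SIGALRM is Unix-only and a signal handler can only be installed in the main thread, which is fragile inside a hosted Gradio worker. If a hard wall-clock timeout were still wanted, a thread-pool variant avoids both constraints. This is a hedged sketch of an alternative, not something present in the commit:

```python
from concurrent.futures import ThreadPoolExecutor, TimeoutError as FuturesTimeout

def load_with_timeout(load_fn, timeout_seconds=300):
    """Run load_fn() with a wall-clock timeout, without touching POSIX signals."""
    pool = ThreadPoolExecutor(max_workers=1)
    future = pool.submit(load_fn)
    try:
        return future.result(timeout=timeout_seconds)
    except FuturesTimeout:
        # The worker thread keeps running in the background; we only stop waiting for it.
        return "❌ Model loading timed out. Please try again later."
    finally:
        pool.shutdown(wait=False)
```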