diff --git a/test_app.ipynb b/test_app.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..5a427f0c01080d5660e2143bf2dd21c017362270 --- /dev/null +++ b/test_app.ipynb @@ -0,0 +1,13973 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "afc32a3c", + "metadata": {}, + "source": [ + "# multiple pdf flow " + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "c59ffa84", + "metadata": {}, + "outputs": [], + "source": [ + "from flask import Flask, request, jsonify, render_template, send_from_directory, send_file\n", + "import cv2, json,base64,io,os,tempfile,logging, re\n", + "import numpy as np\n", + "from unstructured.partition.pdf import partition_pdf\n", + "from PIL import Image\n", + "# from imutils.perspective import four_point_transform\n", + "from dotenv import load_dotenv\n", + "import pytesseract\n", + "from werkzeug.utils import secure_filename\n", + "from langchain_groq import ChatGroq\n", + "from langgraph.prebuilt import create_react_agent\n", + "from pdf2image import convert_from_path, convert_from_bytes\n", + "from concurrent.futures import ThreadPoolExecutor\n", + "from pdf2image.exceptions import PDFInfoNotInstalledError\n", + "from typing import Dict, TypedDict, Optional, Any\n", + "from langgraph.graph import StateGraph, END\n", + "import uuid\n", + "import shutil, time, functools\n", + "from langchain_experimental.open_clip.open_clip import OpenCLIPEmbeddings\n", + "from langchain_core.utils.utils import secret_from_env\n", + "# from matplotlib.offsetbox import OffsetImage, AnnotationBbox\n", + "from io import BytesIO\n", + "from pathlib import Path\n", + "import os\n", + "from utils.block_relation_builder import block_builder#, variable_adder_main\n", + "from langchain.chat_models import ChatOpenAI\n", + "from langchain_openai import ChatOpenAI\n", + "from pydantic import Field, SecretStr" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "d9708ecf", + "metadata": {}, + "outputs": [], + "source": [ + "# Configure logging\n", + "logging.basicConfig(\n", + " level=logging.DEBUG, # Use INFO or ERROR in production\n", + " format=\"%(asctime)s [%(levelname)s] %(message)s\",\n", + " handlers=[\n", + " logging.FileHandler(\"app.log\"),\n", + " logging.StreamHandler()\n", + " ]\n", + ")\n", + "\n", + "logger = logging.getLogger(__name__)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "17a2f88f", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\\blocks\n", + "\\static\n", + "\\generated_projects\n", + "\\blocks\\Backdrops\n", + "\\blocks\\sprites\n", + "\\blocks\\code_blocks\n", + "\\outputs\n" + ] + } + ], + "source": [ + "# ============================== #\n", + "# TESSERACT CONFIGURATION #\n", + "# ============================== #\n", + "# Set the Tesseract executable path\n", + "# pytesseract.pytesseract.tesseract_cmd = r\"C: / \" / \"Program Files / \"Tesseract-OCR\\tesseract.exe\"\n", + "pytesseract.pytesseract.tesseract_cmd = (r'/usr/bin/tesseract')\n", + "# Set the TESSDATA_PREFIX environment variable to the directory containing the 'tessdata' folder\n", + "# This is crucial for Tesseract to find its language data files (e.g., eng.traineddata)\n", + "# os.environ['TESSDATA_PREFIX'] = r'C:\\Program Files\\Tesseract-OCR'\n", + "\n", + "# poppler_path = r\"C:\\poppler\\Library\\bin\"\n", + "backdrop_images_path = r\"app\\blocks\\Backdrops\"\n", + "sprite_images_path = r\"app\\blocks\\sprites\"\n", + "code_blocks_image_path = 
r\"app\\blocks\\code_blocks\"\n", + "\n", + "count = 0\n", + "\n", + "BASE_DIR = Path(\"/\")\n", + "BLOCKS_DIR = BASE_DIR / \"blocks\"\n", + "STATIC_DIR = BASE_DIR / \"static\"\n", + "GEN_PROJECT_DIR = BASE_DIR / \"generated_projects\"\n", + "BACKDROP_DIR = BLOCKS_DIR / \"Backdrops\"\n", + "SPRITE_DIR = BLOCKS_DIR / \"sprites\"\n", + "CODE_BLOCKS_DIR = BLOCKS_DIR / \"code_blocks\"\n", + "# === new: outputs rooted under BASE_DIR ===\n", + "OUTPUT_DIR = BASE_DIR / \"outputs\"\n", + "# DETECTED_IMAGE_DIR = OUTPUT_DIR / \"DETECTED_IMAGE\"\n", + "# SCANNED_IMAGE_DIR = OUTPUT_DIR / \"SCANNED_IMAGE\"\n", + "# JSON_DIR = OUTPUT_DIR / \"EXTRACTED_JSON\"\n", + "\n", + "# make all of them in one go\n", + "for d in (\n", + " BLOCKS_DIR,\n", + " STATIC_DIR,\n", + " GEN_PROJECT_DIR,\n", + " BACKDROP_DIR,\n", + " SPRITE_DIR,\n", + " CODE_BLOCKS_DIR,\n", + " OUTPUT_DIR,\n", + " # DETECTED_IMAGE_DIR,\n", + " # SCANNED_IMAGE_DIR,\n", + " # JSON_DIR,\n", + "):\n", + " print(d)\n", + " d.mkdir(parents=True, exist_ok=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3d7d198f", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 31, + "id": "ddb9bce4", + "metadata": {}, + "outputs": [], + "source": [ + "groq_api_key = \"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\"\n", + "\n", + "llm = ChatGroq(\n", + " model=\"meta-llama/llama-4-scout-17b-16e-instruct\",\n", + " temperature=0,\n", + " max_tokens=None,\n", + ")\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e5266620", + "metadata": {}, + "outputs": [], + "source": [ + "SYSTEM_PROMPT = \"\"\n", + "You are an expert AI assistant named GameScratchAgent, specialized in generating and modifying Scratch-VM 3.x game project JSON.\n", + "Your core task is to process game descriptions and existing Scratch JSON structures, then produce or update JSON segments accurately.\n", + "You possess deep knowledge of Scratch 3.0 project schema, informed by comprehensive reference materials. When generating or modifying the `blocks` section, pay extremely close attention to the following:\n", + "\n", + "**Scratch Project JSON Schema Rules:**\n", + "\n", + "1. **Target Structure (`project.json`'s `targets` array):**\n", + " * Each object in the `targets` array represents a Stage or a Sprite.\n", + " * `isStage`: A boolean indicating if the target is the Stage (`true`) or a Sprite (`false`).\n", + " * `name`: The name of the Stage (e.g., `\"Stage\"`) or the Sprite (e.g., `\"Cat\"`). 
This property replaces `objName` found in older Scratch versions.\n", + " * `variables` dictionary: This dictionary maps unique variable IDs to arrays `[variable_name, initial_value, isCloudVariable?]`.\n", + " * `variable_name`: The user-defined name of the variable.\n", + " * `initial_value`: The variable's initial value, which can be a number or a string.\n", + " * `isCloudVariable?`: (Optional) A boolean indicating if it's a cloud variable (`true`) or a local variable (`false` or absent for regular variables).\n", + " * Example: `\"myVarId123\": [\"score\", 0]`, `\"cloudVarId456\": [\"☁ High Score\", \"54\", true]`\n", + " * `lists` dictionary: This dictionary maps unique list IDs to arrays `[list_name, [item1, item2, ...]]`.\n", + " * Example: `\"myListId789\": [\"my list\", [\"apple\", \"banana\"]]`\n", + " * `broadcasts` dictionary: This dictionary maps unique broadcast IDs to their names.\n", + " * Example: `\"myBroadcastId\": \"Game Over\"`\n", + " * `blocks` dictionary: This dictionary contains all the blocks belonging to this target. Keys are block IDs, values are block objects.\n", + "\n", + "2. **Block Structure (within a `target`'s `blocks` dictionary):**\n", + " * Every block object must have the following core properties:\n", + " * [cite_start]`opcode`: A unique internal identifier for the block's specific functionality (e.g., `\"motion_movesteps\"`, `\"event_whenflagclicked\"`)[cite: 31, 18, 439, 452].\n", + " * `parent`: The ID of the block directly above it in the script stack (or `null` for a top-level block).\n", + " * `next`: The ID of the block directly below it in the script stack (or `null` for the end of a stack).\n", + " * `inputs`: An object defining values or blocks plugged into the block's input slots. Values are **arrays**.\n", + " * `fields`: An object defining dropdown menu selections or direct internal values within the block. Values are **arrays**.\n", + " * `shadow`: `true` if it's a shadow block (e.g., a default number input that can be replaced by another block), `false` otherwise.\n", + " * `topLevel`: `true` if it's a hat block or a standalone block (not connected to a parent), `false` otherwise.\n", + "\n", + "3. **`inputs` Property Details (for blocks plugged into input slots):**\n", + " * **Direct Block Connection (Reporter/Boolean block plugged in):**\n", + " * Format: `\"\": [1, \"\"]`\n", + " * Example: `\"CONDITION\": [1, \"someBooleanBlockId\"]` (e.g., for an `if` block).\n", + " * **Literal Value Input (Shadow block with a literal):**\n", + " * Format: `\"\": [1, [, \"\"]]`\n", + " * `type_code`: A numeric code representing the data type. Common codes include: `4` for number, `7` for string/text, `10` for string/message.\n", + " * `value_string`: The literal value as a string.\n", + " * Examples:\n", + " * Number: `\"STEPS\": [1, [4, \"10\"]]` (for `move 10 steps` block).\n", + " * String/Text: `\"MESSAGE\": [1, [7, \"Hello\"]]` (for `say Hello` block).\n", + " * String/Message (common for text inputs): `\"MESSAGE\": [1, [10, \"Hello!\"]]` (for `say Hello! for 2 secs`).\n", + " * **C-Block Substack (blocks within a loop or conditional):**\n", + " * Format: `\"\": [2, \"\"]`\n", + " * Common `SUBSTACK_NAME` values are `SUBSTACK` (for `if`, `forever`, `repeat`) and `SUBSTACK2` (for `else` in `if else`).\n", + " * Example: `\"SUBSTACK\": [2, \"firstBlockInLoopId\"]`\n", + "\n", + "4. 
**`fields` Property Details (for dropdowns or direct internal values):**\n", + " * Used for dropdown menus, variable names, list names, or other static selections directly within the block.\n", + " * Format: `\"\": [\"\", null]`\n", + " * Examples:\n", + " * Dropdown: `\"KEY_OPTION\": [\"space\", null]` (for `when space key pressed`).\n", + " * Variable Name: `\"VARIABLE\": [\"score\", null]` (for `set score to 0`).\n", + " * Direction (specific motion block): `\"FORWARD_BACKWARD\": [\"forward\", null]` (for `go forward layers`).\n", + "\n", + "5. **Unique IDs:**\n", + " * All block IDs, variable IDs, and list IDs must be unique strings (e.g., \"myBlock123\", \"myVarId456\", \"myListId789\"). Do NOT use placeholder strings like \"block_id_here\".\n", + "\n", + "6. **No Nested `blocks` Dictionary:**\n", + " * The `blocks` dictionary should only appear once per `target` (sprite/stage). Do NOT nest a `blocks` dictionary inside an individual block definition. Blocks that are part of a substack are linked via the `SUBSTACK` input.\n", + "\n", + "7. **Asset Properties (for Costumes/Sounds):**\n", + " * `assetId`, `md5ext`, `bitmapResolution`, `rotationCenterX`/`rotationCenterY` should be correctly associated with costume and sound objects within the `costumes` and `sounds` arrays.\n", + "\n", + "**General Principles and Important Considerations:**\n", + "* **Backward Compatibility:** Adhere strictly to existing Scratch 3.0 opcodes and schema to ensure backward compatibility with older projects. [cite_start]Opcodes must remain consistent to prevent previously saved projects from failing to load or behaving unexpectedly[cite: 18, 19, 25, 65].\n", + "* **Forgiving Inputs:** Recognize that Scratch is designed to be \"forgiving in its interpretation of inputs.\" [cite_start]The Scratch VM handles potentially \"invalid\" inputs gracefully (e.g., converting a number to a string if expected, returning default values like zero or empty strings, or performing no action) rather than crashing[cite: 20, 21, 22, 38, 39, 41]. This implies that precise type matching for inputs might be handled internally by Scratch, allowing for some flexibility in how values are provided, but the agent should aim for the most common and logical type.\n", + "\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7f819042", + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "SYSTEM_PROMPT_JSON_CORRECTOR =\"\"\n", + "You are an assistant that outputs JSON responses strictly following the given schema. \n", + "If the JSON you produce has any formatting errors, missing required fields, or invalid structure, you must identify the problems and correct them. \n", + "Always return only valid JSON that fully conforms to the schema below, enclosed in triple backticks (```), without any extra text or explanation. \n", + "\n", + "If you receive an invalid or incomplete JSON response, fix it by: \n", + "- Adding any missing required fields with appropriate values. \n", + "- Correcting syntax errors such as missing commas, brackets, or quotes. \n", + "- Ensuring the JSON structure matches the schema exactly. 
\n", + "\n", + "Remember: Your output must be valid JSON only, ready to be parsed without errors.\n", + "\"\"\n", + "# debugger and resolver agent for Scratch 3.0\n", + "# Main agent of the system agent for Scratch 3.0\n", + "agent = create_react_agent(\n", + " model=llm,\n", + " tools=[], # No specific tools are defined here, but could be added later\n", + " prompt=SYSTEM_PROMPT\n", + ")\n", + "\n", + "agent_json_resolver = create_react_agent(\n", + " model=llm,\n", + " tools=[], # No specific tools are defined here, but could be added later\n", + " prompt=SYSTEM_PROMPT_JSON_CORRECTOR \n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ba896f5d", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2025-08-12 17:09:32,793 [INFO] Successfully loaded block catalog from blocks\\hat_blocks.json\n", + "2025-08-12 17:09:32,835 [INFO] Successfully loaded block catalog from blocks\\boolean_blocks.json\n", + "2025-08-12 17:09:32,863 [INFO] Successfully loaded block catalog from blocks\\c_blocks.json\n", + "2025-08-12 17:09:32,875 [INFO] Successfully loaded block catalog from blocks\\cap_blocks.json\n", + "2025-08-12 17:09:32,898 [INFO] Successfully loaded block catalog from blocks\\reporter_blocks.json\n", + "2025-08-12 17:09:32,919 [INFO] Successfully loaded block catalog from blocks\\stack_blocks.json\n", + "2025-08-12 17:09:32,925 [INFO] Successfully loaded block catalog from blocks\\blocks.json\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Hat blocks loaded successfully. Hat blocks are characterized by a rounded top and a bump at the bottom. They initiate scripts, meaning they are the starting point for a sequence of interconnected blocks.\n" + ] + } + ], + "source": [ + "# Helper function to load the block catalog from a JSON file\n", + "def _load_block_catalog(file_path: str) -> Dict:\n", + " \"\"Loads the Scratch block catalog from a specified JSON file.\"\"\n", + " try:\n", + " with open(file_path, 'r') as f:\n", + " catalog = json.load(f)\n", + " logger.info(f\"Successfully loaded block catalog from {file_path}\")\n", + " return catalog\n", + " except FileNotFoundError:\n", + " logger.error(f\"Error: Block catalog file not found at {file_path}\")\n", + " # Return an empty dict or raise an error, depending on desired behavior\n", + " return {}\n", + " except json.JSONDecodeError as e:\n", + " logger.error(f\"Error decoding JSON from {file_path}: {e}\")\n", + " return {}\n", + " except Exception as e:\n", + " logger.error(f\"An unexpected error occurred while loading {file_path}: {e}\")\n", + " return {}\n", + " \n", + "def get_block_by_opcode(catalog_data: dict, opcode: str) -> dict | None:\n", + " \"\"\n", + " Search a single catalog (with keys \"description\" and \"blocks\": List[dict])\n", + " for a block whose 'op_code' matches the given opcode.\n", + " Returns the block dict or None if not found.\n", + " \"\"\n", + " for block in catalog_data[\"blocks\"]:\n", + " if block.get(\"op_code\") == opcode:\n", + " return block\n", + " return None\n", + "\n", + "# Helper function to find a block in all catalogs by opcode\n", + "def find_block_in_all(opcode: str, all_catalogs: list[dict]) -> dict | None:\n", + " \"\"\n", + " Search across multiple catalogs for a given opcode.\n", + " Returns the first matching block dict or None.\n", + " \"\"\n", + " for catalog in all_catalogs:\n", + " blk = get_block_by_opcode(catalog, opcode)\n", + " if blk is not None:\n", + " return blk\n", + " 
return None\n", + "\n", + "def variable_intialization(project_data):\n", + " \"\"\n", + " Updates variable and broadcast definitions in a Scratch project JSON,\n", + " populating the 'variables' and 'broadcasts' sections of the Stage target\n", + " and extracting initial values for variables.\n", + "\n", + " Args:\n", + " project_data (dict): The loaded JSON data of the Scratch project.\n", + "\n", + " Returns:\n", + " dict: The updated project JSON data.\n", + " \"\"\n", + "\n", + " stage_target = None\n", + " for target in project_data['targets']:\n", + " if target.get('isStage'):\n", + " stage_target = target\n", + " break\n", + "\n", + " if stage_target is None:\n", + " print(\"Error: Stage target not found in the project data.\")\n", + " return project_data\n", + "\n", + " # Ensure 'variables' and 'broadcasts' exist in the Stage target\n", + " if \"variables\" not in stage_target:\n", + " stage_target[\"variables\"] = {}\n", + " if \"broadcasts\" not in stage_target:\n", + " stage_target[\"broadcasts\"] = {}\n", + "\n", + " # Helper function to recursively find and update variable/broadcast fields\n", + " def process_dict(obj):\n", + " if isinstance(obj, dict):\n", + " # Check for \"data_setvariableto\" opcode to extract initial values\n", + " if obj.get(\"opcode\") == \"data_setvariableto\":\n", + " variable_field = obj.get(\"fields\", {}).get(\"VARIABLE\")\n", + " value_input = obj.get(\"inputs\", {}).get(\"VALUE\")\n", + "\n", + " if variable_field and isinstance(variable_field, list) and len(variable_field) == 2:\n", + " var_name = variable_field[0]\n", + " var_id = variable_field[1]\n", + "\n", + " initial_value = \"\n", + " if value_input and isinstance(value_input, list) and len(value_input) > 1 and \\\n", + " isinstance(value_input[1], list) and len(value_input[1]) > 1:\n", + " if value_input[1][0] == 10:\n", + " initial_value = str(value_input[1][1])\n", + " elif value_input[1][0] == 12 and len(value_input) > 2 and isinstance(value_input[2], list) and value_input[2][0] == 10:\n", + " initial_value = str(value_input[2][1])\n", + " elif isinstance(value_input[1], (str, int, float)):\n", + " initial_value = str(value_input[1])\n", + " stage_target[\"variables\"][var_id] = [var_name, initial_value]\n", + "\n", + " for key, value in obj.items():\n", + " # Process broadcast definitions in 'inputs' (BROADCAST_INPUT)\n", + " if key == \"BROADCAST_INPUT\" and isinstance(value, list) and len(value) == 2 and \\\n", + " isinstance(value[1], list) and len(value[1]) == 3 and value[1][0] == 11:\n", + " broadcast_name = value[1][1]\n", + " broadcast_id = value[1][2]\n", + " stage_target[\"broadcasts\"][broadcast_id] = broadcast_name\n", + "\n", + " # Process broadcast definitions in 'fields' (BROADCAST_OPTION)\n", + " elif key == \"BROADCAST_OPTION\" and isinstance(value, list) and len(value) == 2:\n", + " broadcast_name = value[0]\n", + " broadcast_id = value[1]\n", + " stage_target[\"broadcasts\"][broadcast_id] = broadcast_name\n", + " \n", + " # Recursively call for nested dictionaries or lists\n", + " process_dict(value)\n", + "\n", + " elif isinstance(obj, list):\n", + " for i, item in enumerate(obj):\n", + " # Process variable references in 'inputs' (like [12, \"score\", \"id\"])\n", + " if isinstance(item, list) and len(item) == 3 and item[0] == 12:\n", + " var_name = item[1]\n", + " var_id = item[2]\n", + " if var_id not in stage_target[\"variables\"]:\n", + " stage_target[\"variables\"][var_id] = [var_name, \"]\n", + " process_dict(item)\n", + "\n", + " # Iterate through all targets 
to process their blocks\n", + " for target in project_data['targets']:\n", + " if \"blocks\" in target:\n", + " for block_id, block_data in target[\"blocks\"].items():\n", + " process_dict(block_data)\n", + "\n", + " return project_data\n", + "\n", + "def deduplicate_variables(project_data):\n", + " \"\"\n", + " Removes duplicate variable entries in the 'variables' dictionary of the Stage target,\n", + " prioritizing entries with non-empty values.\n", + "\n", + " Args:\n", + " project_data (dict): The loaded JSON data of the Scratch project.\n", + "\n", + " Returns:\n", + " dict: The updated project JSON data with deduplicated variables.\n", + " \"\"\n", + "\n", + " stage_target = None\n", + " for target in project_data['targets']:\n", + " if target.get('isStage'):\n", + " stage_target = target\n", + " break\n", + "\n", + " if stage_target is None:\n", + " print(\"Error: Stage target not found in the project data.\")\n", + " return project_data\n", + "\n", + " if \"variables\" not in stage_target:\n", + " return project_data # No variables to deduplicate\n", + "\n", + " # Use a temporary dictionary to store the preferred variable entry by name\n", + " # Format: {variable_name: [variable_id, variable_name, variable_value]}\n", + " resolved_variables = {}\n", + "\n", + " for var_id, var_info in stage_target[\"variables\"].items():\n", + " var_name = var_info[0]\n", + " var_value = var_info[1]\n", + "\n", + " if var_name not in resolved_variables:\n", + " # If the variable name is not yet seen, add it\n", + " resolved_variables[var_name] = [var_id, var_name, var_value]\n", + " else:\n", + " # If the variable name is already seen, decide which one to keep\n", + " existing_id, existing_name, existing_value = resolved_variables[var_name]\n", + "\n", + " # Prioritize the entry with a non-empty value\n", + " if var_value != \" and existing_value == \":\n", + " resolved_variables[var_name] = [var_id, var_name, var_value]\n", + " # If both have non-empty values, or both are empty, keep the current one (arbitrary choice, but consistent)\n", + " # The current logic will effectively keep the last one encountered that has a value,\n", + " # or the very last one if all are empty.\n", + " elif var_value != \" and existing_value != \":\n", + " # If there are multiple non-empty values for the same variable name\n", + " # this keeps the one from the most recent iteration.\n", + " # For the given example, this will correctly keep \"5\".\n", + " resolved_variables[var_name] = [var_id, var_name, var_value]\n", + " elif var_value == \" and existing_value == \":\n", + " # If both are empty, just keep the current one (arbitrary)\n", + " resolved_variables[var_name] = [var_id, var_name, var_value]\n", + "\n", + "\n", + " # Reconstruct the 'variables' dictionary using the resolved entries\n", + " new_variables_dict = {}\n", + " for var_name, var_data in resolved_variables.items():\n", + " var_id_to_keep = var_data[0]\n", + " var_name_to_keep = var_data[1]\n", + " var_value_to_keep = var_data[2]\n", + " new_variables_dict[var_id_to_keep] = [var_name_to_keep, var_value_to_keep]\n", + "\n", + " stage_target[\"variables\"] = new_variables_dict\n", + "\n", + " return project_data\n", + "\n", + "def variable_adder_main(project_data):\n", + " try:\n", + " declare_variable_json= variable_intialization(project_data)\n", + " print(\"declare_variable_json------->\",declare_variable_json)\n", + " except Exception as e:\n", + " print(f\"Error error in the variable initialization opcodes: {e}\")\n", + " try:\n", + " 
processed_json= deduplicate_variables(declare_variable_json)\n", + " print(\"processed_json------->\",processed_json)\n", + " return processed_json\n", + " except Exception as e:\n", + " print(f\"Error error in the variable initialization opcodes: {e}\")\n", + "\n", + "\n", + "# --- Global variable for the block catalog ---\n", + "BLOCK_CATALOG_PATH = r\"blocks\\blocks.json\" # Define the path to your JSON file\n", + "HAT_BLOCKS_PATH = r\"blocks\\hat_blocks.json\" # Path to the hat blocks JSON file\n", + "STACK_BLOCKS_PATH = r\"blocks\\stack_blocks.json\" # Path to the stack blocks JSON file\n", + "REPORTER_BLOCKS_PATH = r\"blocks\\reporter_blocks.json\" # Path to the reporter blocks JSON file\n", + "BOOLEAN_BLOCKS_PATH = r\"blocks\\boolean_blocks.json\" # Path to the boolean blocks JSON file\n", + "C_BLOCKS_PATH = r\"blocks\\c_blocks.json\" # Path to the C blocks JSON file\n", + "CAP_BLOCKS_PATH = r\"blocks\\cap_blocks.json\" # Path to the cap blocks JSON file\n", + "\n", + "# Load the block catalogs from their respective JSON files\n", + "hat_block_data = _load_block_catalog(HAT_BLOCKS_PATH)\n", + "hat_description = hat_block_data[\"description\"]\n", + "hat_opcodes_functionalities = \"\\n\".join([f\" - Opcode: {block['op_code']}, functionality: {block['functionality']} example: standalone use {block['example_standalone']}\" for block in hat_block_data[\"blocks\"]])\n", + "print(\"Hat blocks loaded successfully.\", hat_description)\n", + "boolean_block_data = _load_block_catalog(BOOLEAN_BLOCKS_PATH)\n", + "boolean_description = boolean_block_data[\"description\"]\n", + "boolean_opcodes_functionalities = \"\\n\".join([f\" - Opcode: {block['op_code']}, functionality: {block['functionality']} example: standalone use: {block['example_standalone']}\" for block in boolean_block_data[\"blocks\"]])\n", + "\n", + "c_block_data = _load_block_catalog(C_BLOCKS_PATH)\n", + "c_description = c_block_data[\"description\"]\n", + "c_opcodes_functionalities = \"\\n\".join([f\" - Opcode: {block['op_code']}, functionality: {block['functionality']} example: standalone use: {block['example_standalone']}\" for block in c_block_data[\"blocks\"]])\n", + "\n", + "cap_block_data = _load_block_catalog(CAP_BLOCKS_PATH)\n", + "cap_description = cap_block_data[\"description\"] \n", + "cap_opcodes_functionalities = \"\\n\".join([f\" - Opcode: {block['op_code']}, functionality: {block['functionality']} example: standalone use: {block['example_standalone']}\" for block in cap_block_data[\"blocks\"]])\n", + "\n", + "reporter_block_data = _load_block_catalog(REPORTER_BLOCKS_PATH)\n", + "reporter_description = reporter_block_data[\"description\"]\n", + "reporter_opcodes_functionalities = \"\\n\".join([f\" - Opcode: {block['op_code']}, functionality: {block['functionality']} example: standalone use: {block['example_standalone']}\" for block in reporter_block_data[\"blocks\"]])\n", + "\n", + "stack_block_data = _load_block_catalog(STACK_BLOCKS_PATH)\n", + "stack_description = stack_block_data[\"description\"]\n", + "stack_opcodes_functionalities = \"\\n\".join([f\" - Opcode: {block['op_code']}, functionality: {block['functionality']} example: standalone use: {block['example_standalone']}\" for block in stack_block_data[\"blocks\"]])\n", + "\n", + "\n", + "# This makes ALL_SCRATCH_BLOCKS_CATALOG available globally\n", + "ALL_SCRATCH_BLOCKS_CATALOG = _load_block_catalog(BLOCK_CATALOG_PATH)\n", + "\n", + "# Helper function to extract JSON from LLM response\n", + "def extract_json_from_llm_response(raw_response: str) -> dict:\n", 
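+ "    # Best-effort sanitizer for LLM replies: strips any ```json fence, trims to the\n", + "    # outermost braces, applies the regex clean-ups below, then attempts json.loads\n", + "    # (re-raising json.JSONDecodeError if the payload is still invalid).\n",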
+ " # --- 1) Pull out the JSON code‑block if present ---\n", + " md = re.search(r\"```(?:json)?\\s*([\\s\\S]*?)\\s*```\", raw_response)\n", + " json_string = md.group(1).strip() if md else raw_response\n", + "\n", + " # --- 2) Trim to the outermost { … } so we drop any prefix/suffix junk ---\n", + " first, last = json_string.find('{'), json_string.rfind('}')\n", + " if 0 <= first < last:\n", + " json_string = json_string[first:last+1]\n", + "\n", + " # --- 3) PRE‑CLEANUP: remove stray assistant{…}, rogue assistant keys, fix boolean quotes ---\n", + " json_string = re.sub(r'\\b\\w+\\s*{', '{', json_string)\n", + " json_string = re.sub(r'\"assistant\"\\s*:', '', json_string)\n", + " json_string = re.sub(r'\\b(false|true)\"', r'\\1', json_string)\n", + " logger.debug(\"Ran pre‑cleanup for stray tokens and boolean quotes.\")\n", + "\n", + " # --- 3.1) Fix stray inner quotes at start of name/list values ---\n", + " # e.g., { \"name\": \" \\\"recent_scoress\\\", ... } → \"recent_scoress\"\n", + " json_string = re.sub(\n", + " r'(\"name\"\\s*:\\s*\")\\s*\"',\n", + " r'\\1',\n", + " json_string\n", + " )\n", + "\n", + " # --- 4) Escape all embedded quotes in any `logic` value up to the next key ---\n", + " def _esc(m):\n", + " prefix, body = m.group(1), m.group(2)\n", + " return prefix + body.replace('\"', r'\\\"')\n", + " json_string = re.sub(\n", + " r'(\"logic\"\\s*:\\s*\")([\\s\\S]+?)(?=\",\\s*\"[A-Za-z_]\\w*\"\\s*:\\s*)',\n", + " _esc,\n", + " json_string\n", + " )\n", + " logger.debug(\"Escaped embedded quotes in logic fields.\")\n", + "\n", + " logger.debug(\"Quoted unquoted keys.\")\n", + "\n", + " # --- 6) Remove trailing commas before } or ] ---\n", + " json_string = re.sub(r',\\s*(?=[}\\],])', '', json_string)\n", + " json_string = re.sub(r',\\s*,', ',', json_string)\n", + " logger.debug(\"Removed trailing commas.\")\n", + "\n", + " # --- 7) Balance braces: drop extra } at end if needed ---\n", + " ob, cb = json_string.count('{'), json_string.count('}')\n", + " if cb > ob:\n", + " excess = cb - ob\n", + " json_string = json_string.rstrip()[:-excess]\n", + " logger.debug(f\"Stripped {excess} extra closing brace(s).\")\n", + "\n", + " # --- 8) Escape literal newlines in *all* string values ---\n", + " json_string = re.sub(\n", + " r'\"((?:[^\"\\\\]|\\\\.)*?)\"',\n", + " lambda m: '\"' + m.group(1).replace('\\n', '\\\\n').replace('\\r', '\\\\r') + '\"',\n", + " json_string,\n", + " flags=re.DOTALL\n", + " )\n", + " logger.debug(\"Escaped newlines in strings.\")\n", + "\n", + " # --- 9) Final parse attempt ---\n", + " try:\n", + " return json.loads(json_string)\n", + " except json.JSONDecodeError:\n", + " logger.error(\"Sanitized JSON still invalid:\\n%s\", json_string)\n", + " raise\n", + "\n", + "def reduce_image_size_to_limit(clean_b64_str, max_kb=4000):\n", + " \"\"\n", + " Reduce an image's size to be as close as possible to max_kb without exceeding it.\n", + " Returns the final base64 string and its size in KB.\n", + " \"\"\n", + " import re, base64\n", + " from io import BytesIO\n", + " from PIL import Image\n", + "\n", + " # Remove the data URI prefix\n", + " base64_data = re.sub(r\"^data:image\\/[a-zA-Z]+;base64,\", \", clean_b64_str)\n", + " image_data = base64.b64decode(base64_data)\n", + "\n", + " # Load into PIL\n", + " img = Image.open(BytesIO(image_data))\n", + "\n", + " low, high = 20, 95 # reasonable JPEG quality range\n", + " best_b64 = None\n", + " best_size_kb = 0\n", + "\n", + " while low <= high:\n", + " mid = (low + high) // 2\n", + " buffer = BytesIO()\n", + " 
img.save(buffer, format=\"JPEG\", quality=mid)\n", + " size_kb = len(buffer.getvalue()) / \"1024\n", + "\n", + " if size_kb <= max_kb:\n", + " # This quality is valid, try higher\n", + " best_b64 = base64.b64encode(buffer.getvalue()).decode(\"utf-8\")\n", + " best_size_kb = size_kb\n", + " low = mid + 1\n", + " else:\n", + " # Too big, try lower\n", + " high = mid - 1\n", + "\n", + " return f\"data:image/jpeg;base64,{best_b64}\"\n", + "\n", + "#clean the base64 model here\n", + "def clean_base64_for_model(raw_b64):\n", + " import io, base64, re\n", + " from PIL import Image\n", + "\n", + " if not raw_b64:\n", + " return \", \"\n", + "\n", + " if isinstance(raw_b64, list):\n", + " raw_b64 = raw_b64[0] if raw_b64 else \"\n", + " if not raw_b64:\n", + " return \", \"\n", + "\n", + " if isinstance(raw_b64, Image.Image):\n", + " buf = io.BytesIO()\n", + " raw_b64.save(buf, format=\"PNG\")\n", + " raw_b64 = base64.b64encode(buf.getvalue()).decode()\n", + "\n", + " if not isinstance(raw_b64, str):\n", + " raise TypeError(f\"Expected base64 string or PIL Image, got {type(raw_b64)}\")\n", + "\n", + " # Remove data URI prefix if present\n", + " clean_b64 = re.sub(r\"^data:image\\/[a-zA-Z]+;base64,\", \", raw_b64)\n", + " clean_b64 = clean_b64.replace(\"\\n\", \").replace(\"\\r\", \").strip()\n", + "\n", + " # Log original size\n", + " original_size = len(clean_b64.encode(\"utf-8\"))\n", + " print(f\"Original Base64 size (bytes): {original_size}\")\n", + " if original_size > 4000000:\n", + " # Reduce size to under 4 MB\n", + " reduced_b64 = reduce_image_size_to_limit(clean_b64, max_kb=4000)\n", + " clean_b64_2 = re.sub(r\"^data:image\\/[a-zA-Z]+;base64,\", \", reduced_b64)\n", + " clean_b64_2 = clean_b64_2.replace(\"\\n\", \").replace(\"\\r\", \").strip()\n", + " reduced_size = len(clean_b64_2.encode(\"utf-8\"))\n", + " print(f\"Reduced Base64 size (bytes): {reduced_size}\")\n", + " # Return both prefixed and clean reduced versions\n", + " return f\"data:image/jpeg;base64,{reduced_b64}\"\n", + " return f\"data:image/jpeg;base64,{clean_b64}\"\n", + "\n", + "def format_scratch_pseudo_code(code_string):\n", + " \"\"\n", + " Parses and formats Scratch pseudo-code with correct indentation,\n", + " specifically handling if/else/end structures correctly.\n", + "\n", + " Args:\n", + " code_string (str): A string containing Scratch pseudo-code with\n", + " potentially inconsistent indentation.\n", + "\n", + " Returns:\n", + " str: The correctly formatted and indented pseudo-code string.\n", + " \"\"\n", + " lines = code_string.strip().split('\\n')\n", + " formatted_lines = []\n", + " indent_level = 0\n", + " \n", + " # Keywords that increase indentation for the NEXT line\n", + " indent_keywords = ['when', 'forever', 'if', 'repeat', 'else']\n", + " \n", + " # Keywords that decrease indentation for the CURRENT line\n", + " unindent_keywords = ['end', 'else']\n", + "\n", + " for line in lines:\n", + " stripped_line = line.strip()\n", + " if not stripped_line:\n", + " continue\n", + "\n", + " # Check for keywords that should un-indent the current line\n", + " if any(keyword in stripped_line for keyword in unindent_keywords):\n", + " # Special case for 'else': it should align with its 'if'\n", + " if 'else' in stripped_line:\n", + " # Decrease indentation for 'else' and its following lines\n", + " indentation = ' ' * (indent_level -1)\n", + " formatted_lines.append(indentation + stripped_line)\n", + " continue\n", + " \n", + " # For 'end', decrease the level before formatting\n", + " indent_level = max(0, 
indent_level - 1)\n", + " \n", + " indentation = ' ' * indent_level\n", + " formatted_lines.append(indentation + stripped_line)\n", + "\n", + " # Check for keywords that should indent the next line\n", + " if any(keyword in stripped_line for keyword in indent_keywords):\n", + " # 'else' both un-indents and indents, so the level remains the same for the next block\n", + " if 'else' not in stripped_line:\n", + " indent_level += 1\n", + "\n", + " return '\\n'.join(formatted_lines)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "5f022c12", + "metadata": {}, + "outputs": [], + "source": [ + "hat_list= [block['op_code'] for block in hat_block_data[\"blocks\"]]\n", + "boolean_list= [block['op_code'] for block in boolean_block_data[\"blocks\"]]\n", + "c_list= [block['op_code'] for block in c_block_data[\"blocks\"]]\n", + "cap_list= [block['op_code'] for block in cap_block_data[\"blocks\"]]\n", + "reporter_list= [block['op_code'] for block in reporter_block_data[\"blocks\"]]\n", + "stack_list= [block['op_code'] for block in stack_block_data[\"blocks\"]]\n", + "# boolean_description\n", + "# c_description\n", + "# cap_description\n", + "# reporter_description\n", + "# stack_description\n" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "eefeff72", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['operator_lt',\n", + " 'operator_equals',\n", + " 'operator_gt',\n", + " 'operator_and',\n", + " 'operator_or',\n", + " 'operator_not',\n", + " 'operator_contains',\n", + " 'sensing_touchingobject',\n", + " 'sensing_touchingcolor',\n", + " 'sensing_coloristouchingcolor',\n", + " 'sensing_keypressed',\n", + " 'sensing_mousedown',\n", + " 'data_listcontainsitem']" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "boolean_list" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "9830a322", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['motion_movesteps',\n", + " 'motion_turnright',\n", + " 'motion_turnleft',\n", + " 'motion_goto',\n", + " 'motion_gotoxy',\n", + " 'motion_glideto',\n", + " 'motion_glidesecstoxy',\n", + " 'motion_pointindirection',\n", + " 'motion_pointtowards',\n", + " 'motion_changexby',\n", + " 'motion_setx',\n", + " 'motion_changeyby',\n", + " 'motion_sety',\n", + " 'motion_ifonedgebounce',\n", + " 'motion_setrotationstyle',\n", + " 'looks_sayforsecs',\n", + " 'looks_say',\n", + " 'looks_thinkforsecs',\n", + " 'looks_think',\n", + " 'looks_switchcostumeto',\n", + " 'looks_nextcostume',\n", + " 'looks_switchbackdropto',\n", + " 'looks_switchbackdroptowait',\n", + " 'looks_nextbackdrop',\n", + " 'looks_changesizeby',\n", + " 'looks_setsizeto',\n", + " 'looks_changeeffectby',\n", + " 'looks_seteffectto',\n", + " 'looks_cleargraphiceffects',\n", + " 'looks_show',\n", + " 'looks_hide',\n", + " 'looks_gotofrontback',\n", + " 'looks_goforwardbackwardlayers',\n", + " 'sound_playuntildone',\n", + " 'sound_play',\n", + " 'sound_stopallsounds',\n", + " 'sound_changevolumeby',\n", + " 'sound_setvolumeto',\n", + " 'event_broadcast',\n", + " 'event_broadcastandwait',\n", + " 'control_wait',\n", + " 'control_wait_until',\n", + " 'control_stop',\n", + " 'control_create_clone_of',\n", + " 'control_delete_this_clone',\n", + " 'data_setvariableto',\n", + " 'data_changevariableby',\n", + " 'data_addtolist',\n", + " 'data_deleteoflist',\n", + " 'data_insertatlist',\n", + " 'data_replaceitemoflist',\n", + " 'data_showvariable',\n", + " 
'data_hidevariable',\n", + " 'data_showlist',\n", + " 'data_hidelist',\n", + " 'sensing_askandwait',\n", + " 'sensing_resettimer',\n", + " 'sensing_setdragmode',\n", + " 'procedures_call',\n", + " 'operator_lt',\n", + " 'operator_equals',\n", + " 'operator_gt',\n", + " 'operator_and',\n", + " 'operator_or',\n", + " 'operator_not',\n", + " 'operator_contains',\n", + " 'sensing_touchingobject',\n", + " 'sensing_touchingcolor',\n", + " 'sensing_coloristouchingcolor',\n", + " 'sensing_keypressed',\n", + " 'sensing_mousedown',\n", + " 'data_listcontainsitem',\n", + " 'control_repeat',\n", + " 'control_forever',\n", + " 'control_if',\n", + " 'control_if_else',\n", + " 'control_repeat_until',\n", + " 'control_stop',\n", + " 'control_delete_this_clone',\n", + " 'control_forever',\n", + " 'motion_xposition',\n", + " 'motion_yposition',\n", + " 'motion_direction',\n", + " 'looks_costumenumbername',\n", + " 'looks_size',\n", + " 'looks_backdropnumbername',\n", + " 'sound_volume',\n", + " 'sensing_distanceto',\n", + " 'sensing_answer',\n", + " 'sensing_mousex',\n", + " 'sensing_mousey',\n", + " 'sensing_loudness',\n", + " 'sensing_timer',\n", + " 'sensing_of',\n", + " 'sensing_current',\n", + " 'sensing_dayssince2000',\n", + " 'sensing_username',\n", + " 'operator_add',\n", + " 'operator_subtract',\n", + " 'operator_multiply',\n", + " 'operator_divide',\n", + " 'operator_random',\n", + " 'operator_join',\n", + " 'operator_letterof',\n", + " 'operator_length',\n", + " 'operator_mod',\n", + " 'operator_round',\n", + " 'operator_mathop',\n", + " 'data_variable',\n", + " 'data_list',\n", + " 'data_itemoflist',\n", + " 'data_lengthoflist',\n", + " 'data_itemnumoflist',\n", + " 'event_whenflagclicked',\n", + " 'event_whenkeypressed',\n", + " 'event_whenthisspriteclicked',\n", + " 'event_whenbackdropswitchesto',\n", + " 'event_whengreaterthan',\n", + " 'event_whenbroadcastreceived',\n", + " 'control_start_as_clone',\n", + " 'procedures_definition']" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "stack_list + boolean_list + c_list + cap_list + reporter_list + hat_list" + ] + }, + { + "cell_type": "code", + "execution_count": 49, + "id": "83846cc3", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 49, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "1321148<4000000" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "id": "61d4b0cc", + "metadata": {}, + "outputs": [], + "source": [ + "import tempfile\n", + "import io\n", + "from io import BytesIO\n", + "from pdf2image import convert_from_path, convert_from_bytes\n", + "\n", + "def convert_pdf_stream_to_images(pdf_stream: io.BytesIO, dpi=300):\n", + " # Ensure we are at the start of the stream\n", + " pdf_stream.seek(0)\n", + " \n", + " with tempfile.NamedTemporaryFile(delete=False, suffix=\".pdf\") as tmp_pdf:\n", + " tmp_pdf.write(pdf_stream.read())\n", + " tmp_pdf_path = tmp_pdf.name\n", + " \n", + " # Now use convert_from_path on the temp file\n", + " images = convert_from_path(tmp_pdf_path, dpi=dpi)\n", + " return images\n", + "\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7555d9df", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2025-08-11 19:08:14,549 [INFO] --- Running DelayForTPMNode ---\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "PDF has 6 pages\n" + ] + }, + { 
+ "name": "stderr", + "output_type": "stream", + "text": [ + "2025-08-11 19:08:15,550 [INFO] Delay completed.\n", + "2025-08-11 19:08:15,551 [INFO] --- Running plan_logic_aligner_node ---\n", + "2025-08-11 19:08:15,554 [DEBUG] Request options: {'method': 'post', 'url': '/openai/v1/chat/completions', 'files': None, 'idempotency_key': 'stainless-python-retry-38a97d6f-f1d4-4c9b-a824-6067e4524acf', 'json_data': {'messages': [{'role': 'system', 'content': '\\nYou are an expert AI assistant named GameScratchAgent, specialized in generating and modifying Scratch-VM 3.x game project JSON.\\nYour core task is to process game descriptions and existing Scratch JSON structures, then produce or update JSON segments accurately.\\nYou possess deep knowledge of Scratch 3.0 project schema, informed by comprehensive reference materials. When generating or modifying the `blocks` section, pay extremely close attention to the following:\\n\\n**Scratch Project JSON Schema Rules:**\\n\\n1. **Target Structure (`project.json`\\'s `targets` array):**\\n * Each object in the `targets` array represents a Stage or a Sprite.\\n * `isStage`: A boolean indicating if the target is the Stage (`true`) or a Sprite (`false`).\\n * `name`: The name of the Stage (e.g., `\"Stage\"`) or the Sprite (e.g., `\"Cat\"`). This property replaces `objName` found in older Scratch versions.\\n * `variables` dictionary: This dictionary maps unique variable IDs to arrays `[variable_name, initial_value, isCloudVariable?]`.\\n * `variable_name`: The user-defined name of the variable.\\n * `initial_value`: The variable\\'s initial value, which can be a number or a string.\\n * `isCloudVariable?`: (Optional) A boolean indicating if it\\'s a cloud variable (`true`) or a local variable (`false` or absent for regular variables).\\n * Example: `\"myVarId123\": [\"score\", 0]`, `\"cloudVarId456\": [\"☁ High Score\", \"54\", true]`\\n * `lists` dictionary: This dictionary maps unique list IDs to arrays `[list_name, [item1, item2, ...]]`.\\n * Example: `\"myListId789\": [\"my list\", [\"apple\", \"banana\"]]`\\n * `broadcasts` dictionary: This dictionary maps unique broadcast IDs to their names.\\n * Example: `\"myBroadcastId\": \"Game Over\"`\\n * `blocks` dictionary: This dictionary contains all the blocks belonging to this target. Keys are block IDs, values are block objects.\\n\\n2. **Block Structure (within a `target`\\'s `blocks` dictionary):**\\n * Every block object must have the following core properties:\\n * [cite_start]`opcode`: A unique internal identifier for the block\\'s specific functionality (e.g., `\"motion_movesteps\"`, `\"event_whenflagclicked\"`)[cite: 31, 18, 439, 452].\\n * `parent`: The ID of the block directly above it in the script stack (or `null` for a top-level block).\\n * `next`: The ID of the block directly below it in the script stack (or `null` for the end of a stack).\\n * `inputs`: An object defining values or blocks plugged into the block\\'s input slots. Values are **arrays**.\\n * `fields`: An object defining dropdown menu selections or direct internal values within the block. Values are **arrays**.\\n * `shadow`: `true` if it\\'s a shadow block (e.g., a default number input that can be replaced by another block), `false` otherwise.\\n * `topLevel`: `true` if it\\'s a hat block or a standalone block (not connected to a parent), `false` otherwise.\\n\\n3. 
**`inputs` Property Details (for blocks plugged into input slots):**\\n * **Direct Block Connection (Reporter/Boolean block plugged in):**\\n * Format: `\"\": [1, \"\"]`\\n * Example: `\"CONDITION\": [1, \"someBooleanBlockId\"]` (e.g., for an `if` block).\\n * **Literal Value Input (Shadow block with a literal):**\\n * Format: `\"\": [1, [, \"\"]]`\\n * `type_code`: A numeric code representing the data type. Common codes include: `4` for number, `7` for string/text, `10` for string/message.\\n * `value_string`: The literal value as a string.\\n * Examples:\\n * Number: `\"STEPS\": [1, [4, \"10\"]]` (for `move 10 steps` block).\\n * String/Text: `\"MESSAGE\": [1, [7, \"Hello\"]]` (for `say Hello` block).\\n * String/Message (common for text inputs): `\"MESSAGE\": [1, [10, \"Hello!\"]]` (for `say Hello! for 2 secs`).\\n * **C-Block Substack (blocks within a loop or conditional):**\\n * Format: `\"\": [2, \"\"]`\\n * Common `SUBSTACK_NAME` values are `SUBSTACK` (for `if`, `forever`, `repeat`) and `SUBSTACK2` (for `else` in `if else`).\\n * Example: `\"SUBSTACK\": [2, \"firstBlockInLoopId\"]`\\n\\n4. **`fields` Property Details (for dropdowns or direct internal values):**\\n * Used for dropdown menus, variable names, list names, or other static selections directly within the block.\\n * Format: `\"\": [\"\", null]`\\n * Examples:\\n * Dropdown: `\"KEY_OPTION\": [\"space\", null]` (for `when space key pressed`).\\n * Variable Name: `\"VARIABLE\": [\"score\", null]` (for `set score to 0`).\\n * Direction (specific motion block): `\"FORWARD_BACKWARD\": [\"forward\", null]` (for `go forward layers`).\\n\\n5. **Unique IDs:**\\n * All block IDs, variable IDs, and list IDs must be unique strings (e.g., \"myBlock123\", \"myVarId456\", \"myListId789\"). Do NOT use placeholder strings like \"block_id_here\".\\n\\n6. **No Nested `blocks` Dictionary:**\\n * The `blocks` dictionary should only appear once per `target` (sprite/stage). Do NOT nest a `blocks` dictionary inside an individual block definition. Blocks that are part of a substack are linked via the `SUBSTACK` input.\\n\\n7. **Asset Properties (for Costumes/Sounds):**\\n * `assetId`, `md5ext`, `bitmapResolution`, `rotationCenterX`/`rotationCenterY` should be correctly associated with costume and sound objects within the `costumes` and `sounds` arrays.\\n\\n**General Principles and Important Considerations:**\\n* **Backward Compatibility:** Adhere strictly to existing Scratch 3.0 opcodes and schema to ensure backward compatibility with older projects. [cite_start]Opcodes must remain consistent to prevent previously saved projects from failing to load or behaving unexpectedly[cite: 18, 19, 25, 65].\\n* **Forgiving Inputs:** Recognize that Scratch is designed to be \"forgiving in its interpretation of inputs.\" [cite_start]The Scratch VM handles potentially \"invalid\" inputs gracefully (e.g., converting a number to a string if expected, returning default values like zero or empty strings, or performing no action) rather than crashing[cite: 20, 21, 22, 38, 39, 41]. 
This implies that precise type matching for inputs might be handled internally by Scratch, allowing for some flexibility in how values are provided, but the agent should aim for the most common and logical type.\\n'}, {'role': 'user', 'content': [{'type': 'text', 'text': '\\n You are an expert in Scratch 3.0 game development, specializing in understanding block relationships (stacked, nested).\\n \"Analyze the Scratch code-block image and generate Pseudo-Code for what this logic appears to be doing.\"\\n From Image, you also have to detect a value of Key given in Text form \"Script for: \". Below is the example\\n Example: \"Script for: Bear\", \"Script for:\" is a key and \"Bear\" is value and check if there is related target name available.\\n\\n **Targets in Game (Sprites and Stage) available in project_json:** Cat, Bear, Batter, Beetle, Centaur\\n\\n --- Scratch 3.0 Block Reference ---\\n ### Hat Blocks\\n Description: Hat blocks are characterized by a rounded top and a bump at the bottom. They initiate scripts, meaning they are the starting point for a sequence of interconnected blocks.\\n Blocks:\\n - Opcode: event_whenflagclicked, functionality: This Hat block initiates the script when the green flag is clicked, serving as the common starting point for most Scratch projects. example: standalone use when green flag clicked\\n - Opcode: event_whenkeypressed, functionality: This Hat block initiates the script when a specified keyboard key is pressed. example: standalone use when [space v] key pressed\\n - Opcode: event_whenthisspriteclicked, functionality: This Hat block starts the script when the sprite itself is clicked. example: standalone use when this sprite clicked\\n - Opcode: event_whenbackdropswitchesto, functionality: This Hat block triggers the script when the stage backdrop changes to a specified backdrop. example: standalone use when backdrop switches to [game over v]\\n - Opcode: event_whengreaterthan, functionality: This Hat block starts the script when a certain value (e.g., loudness from a microphone, or the timer) exceeds a defined threshold. example: standalone use when [loudness v] > (70)\\n - Opcode: event_whenbroadcastreceived, functionality: This Hat block initiates the script upon the reception of a specific broadcast message. This mechanism facilitates indirect communication between sprites or the stage. example: standalone use when I receive [start game v]\\n - Opcode: control_start_as_clone, functionality: This Hat block initiates the script when a clone of the sprite is created. It defines the behavior of individual clones. example: standalone use When I Start as a Clone\\n - Opcode: procedures_definition, functionality: This Hat block serves as the definition header for a custom block\\'s script. It allows users to define reusable sequences of code by specifying the block\\'s name and any input parameters it will accept. This promotes modularity and abstraction in projects. example: standalone use define jump (height)\\n\\n ### Boolean Blocks\\n Description: Boolean blocks are hexagonal in shape. They represent conditions that evaluate to either \\'true\\' or \\'false\\' and are typically used as inputs for control flow blocks.\\n Blocks:\\n - Opcode: operator_lt, functionality: Checks if the first value is less than the second. example: standalone use: <(score) < (10)>\\n - Opcode: operator_equals, functionality: Checks if two values are equal. 
example: standalone use: <(answer) = (5)>\\n - Opcode: operator_gt, functionality: Checks if the first value is greater than the second. example: standalone use: <([health v]) > (0)>\\n - Opcode: operator_and, functionality: Returns \\'true\\' if both provided Boolean conditions are \\'true\\'. example: standalone use: < and >\\n - Opcode: operator_or, functionality: Returns \\'true\\' if at least one of the provided Boolean conditions is \\'true\\'. example: standalone use: < or >\\n - Opcode: operator_not, functionality: Returns \\'true\\' if the provided Boolean condition is \\'false\\', and \\'false\\' if it is \\'true\\'. example: standalone use: >\\n - Opcode: operator_contains, functionality: Checks if one string contains another string. example: standalone use: <[apple v] contains [a v]?>\\n - Opcode: sensing_touchingobject, functionality: Checks if its sprite is touching the mouse-pointer, edge, or another specified sprite. example: standalone use: \\n - Opcode: sensing_touchingcolor, functionality: Checks whether its sprite is touching a specified color. example: standalone use: \\n - Opcode: sensing_coloristouchingcolor, functionality: Checks whether a specific color on its sprite is touching another specified color on the stage or another sprite. example: standalone use: \\n - Opcode: sensing_keypressed, functionality: Checks if a specified keyboard key is currently being pressed. example: standalone use: \\n - Opcode: sensing_mousedown, functionality: Checks if the computer mouse\\'s primary button is being clicked while the cursor is over the stage. example: standalone use: \\n - Opcode: data_listcontainsitem, functionality: Checks if a list includes a specific item. example: standalone use: <[inventory v] contains [key]?>\\n\\n ### C Blocks\\n Description: C blocks are shaped like the letter \\'C\\'. They are used to loop or conditionally execute blocks that are placed within their opening, managing the flow of scripts.\\n Blocks:\\n - Opcode: control_repeat, functionality: Repeats the blocks inside it a specified number of times. example: standalone use: repeat (10)\\n - Opcode: control_forever, functionality: Continuously runs the blocks inside it. example: standalone use: forever\\n - Opcode: control_if, functionality: Executes the blocks inside it only if the specified boolean condition is true. [NOTE: it takes boolean blocks as input] example: standalone use: if then\\n - Opcode: control_if_else, functionality: Executes one set of blocks if the specified boolean condition is true, and a different set of blocks if the condition is false. [NOTE: it takes boolean blocks as input] example: standalone use: if (10)> then else\\n - Opcode: control_repeat_until, functionality: Repeats the blocks inside it until the specified boolean condition becomes true. [NOTE: it takes boolean blocks as input] example: standalone use: repeat until \\n\\n ### Cap Blocks\\n Description: Cap blocks have a notch at the top and a flat bottom. They signify the end of a script, preventing any further blocks from being placed below them, and are used to terminate scripts or specific actions.\\n Blocks:\\n - Opcode: control_stop, functionality: Halts all scripts, only the current script, or other scripts within the same sprite. Its shape can dynamically change based on the selected option. example: standalone use: stop [all v]\\n - Opcode: control_delete_this_clone, functionality: Removes the clone that is executing it from the stage. 
example: standalone use: delete this clone\\n - Opcode: control_forever, functionality: Continuously runs the blocks inside it. example: standalone use: forever\\n\\n ### Reporter Blocks\\n Description: Reporter blocks have rounded edges. Their purpose is to report values, which can be numbers or strings, and are designed to fit into input slots of other blocks.\\n Blocks:\\n - Opcode: motion_xposition, functionality: Reports the current X-coordinate of the sprite.[NOTE: not used in stage/backdrops] example: standalone use: x position\\n - Opcode: motion_yposition, functionality: Reports the current Y coordinate of the sprite on the stage.[NOTE: not used in stage/backdrops] example: standalone use: y position\\n - Opcode: motion_direction, functionality: Reports the current direction of the sprite in degrees (0 = up, 90 = right, 180 = down, -90 = left).[NOTE: not used in stage/backdrops] example: standalone use: direction\\n - Opcode: looks_costumenumbername, functionality: Reports the current costume\\'s number or name. example: standalone use: costume [number v]\\n - Opcode: looks_size, functionality: Reports the current size of the sprite as a percentage. example: standalone use: size\\n - Opcode: looks_backdropnumbername, functionality: Reports the current backdrop\\'s number or name. example: standalone use: (backdrop [number v])\\n - Opcode: sound_volume, functionality: Reports the current volume level of the sprite. example: standalone use: volume\\n - Opcode: sensing_distanceto, functionality: Reports the distance from the current sprite to the mouse-pointer or another specified sprite. example: standalone use: distance to [mouse-pointer v]\\n - Opcode: sensing_answer, functionality: Holds the most recent text inputted using the \\'Ask () and Wait\\' block. example: standalone use: answer\\n - Opcode: sensing_mousex, functionality: Reports the mouse-pointer’s current X position on the stage. example: standalone use: mouse x\\n - Opcode: sensing_mousey, functionality: Reports the mouse-pointer’s current Y position on the stage. example: standalone use: mouse y\\n - Opcode: sensing_loudness, functionality: Reports the loudness of noise received by a microphone on a scale of 0 to 100. example: standalone use: loudness\\n - Opcode: sensing_timer, functionality: Reports the elapsed time since Scratch was launched or the timer was reset, increasing by 1 every second. example: standalone use: timer\\n - Opcode: sensing_of, functionality: Reports a specified value (e.g., x position, direction, costume number) of a specified sprite or the Stage to be accessed in current sprite or stage. example: standalone use: x position of [Sprite1 v]\\n - Opcode: sensing_current, functionality: Reports the current local year, month, date, day of the week, hour, minutes, or seconds. example: standalone use: current [hour v]\\n - Opcode: sensing_dayssince2000, functionality: Reports the number of days (and fractions of a day) since 00:00:00 UTC on January 1, 2000. example: standalone use: days since 2000\\n - Opcode: sensing_username, functionality: Reports the username of the user currently logged into Scratch. If no user is logged in, it reports nothing. example: standalone use: username\\n - Opcode: operator_add, functionality: Adds two numerical values. example: standalone use: (5) + (3)\\n - Opcode: operator_subtract, functionality: Subtracts the second numerical value from the first. example: standalone use: ((10) - (4))\\n - Opcode: operator_multiply, functionality: Multiplies two numerical values. 
example: standalone use: (6) * (7)\\n - Opcode: operator_divide, functionality: Divides the first numerical value by the second. example: standalone use: ((20) / (5))\\n - Opcode: operator_random, functionality: Generates a random integer within a specified inclusive range. example: standalone use: (pick random (1) to (10))\\n - Opcode: operator_join, functionality: Concatenates two strings or values into a single string. example: standalone use: (join [Hello ][World!])\\n - Opcode: operator_letterof, functionality: Reports the character at a specific numerical position within a string. example: standalone use: (letter (1) of [apple])\\n - Opcode: operator_length, functionality: Reports the total number of characters in a given string. example: standalone use: (length of [banana])\\n - Opcode: operator_mod, functionality: Reports the remainder when the first number is divided by the second. example: standalone use: ((10) mod (3))\\n - Opcode: operator_round, functionality: Rounds a numerical value to the nearest integer. example: standalone use: (round (3.7))\\n - Opcode: operator_mathop, functionality: Performs various mathematical functions (e.g., absolute value, square root, trigonometric functions). example: standalone use: ([sqrt v] of (25))\\n - Opcode: data_variable, functionality: Provides the current value stored in a variable. example: standalone use: [score v]\\n - Opcode: data_list, functionality: Reports the entire content of a specified list. When clicked in the editor, it displays the list as a monitor. example: standalone use: [my list v]\\n - Opcode: data_itemoflist, functionality: Reports the item located at a specific position in a list. example: standalone use: item (1) of [shopping list v]\\n - Opcode: data_lengthoflist, functionality: Provides the total number of items contained in a list. example: standalone use: (length of [myList v])\\n - Opcode: data_itemnumoflist, functionality: Reports the index number of the first occurrence of a specified item in a list. If the item is not found, it reports 0. example: standalone use: (item # of [apple] in [shopping list v])\\n\\n ### Stack Blocks\\n Description: Stack blocks are the most common block shape, featuring a notch at the top and a bump at the bottom. They perform the main commands within a script and can connect both above and below them.\\n Blocks:\\n - Opcode: motion_movesteps, functionality: Moves the sprite forward by the specified number of steps in the direction it is currently facing. A positive value moves it forward, and a negative value moves it backward. example: standalone use: move () steps\\n - Opcode: motion_turnright, functionality: Turns the sprite clockwise by the specified number of degrees. example: standalone use: turn (clockwise icon) (15) degrees\\n - Opcode: motion_turnleft, functionality: Turns the sprite counter-clockwise by the specified number of degrees. example: standalone use: turn (counter-clockwise icon) (15) degrees\\n - Opcode: motion_goto, functionality: Moves the sprite to a specified location, which can be a random position or at the mouse pointer or another to the sprite. example: standalone use: go to [random position v]\\n - Opcode: motion_gotoxy, functionality: Moves the sprite to the specified X and Y coordinates on the stage. example: standalone use: go to x: (0) y: (0)\\n - Opcode: motion_glideto, functionality: Glides the sprite smoothly to a specified location (random position, mouse pointer, or another sprite) over a given number of seconds. 
example: standalone use: glide (1) secs to ([random position v])\\n - Opcode: motion_glidesecstoxy, functionality: Glides the sprite smoothly to the specified X and Y coordinates over a given number of seconds. example: standalone use: glide (1) secs to x: (100) y: (50)\\n - Opcode: motion_pointindirection, functionality: Sets the sprite\\'s direction to a specified angle in degrees (0 = up, 90 = right, 180 = down, -90 = left). example: standalone use: point in direction (90)\\n - Opcode: motion_pointtowards, functionality: Points the sprite towards the mouse pointer or another specified sprite. example: standalone use: point towards [mouse-pointer v]\\n - Opcode: motion_changexby, functionality: Changes the sprite\\'s X-coordinate by the specified amount, moving it horizontally. example: standalone use: change x by (10)\\n - Opcode: motion_setx, functionality: Sets the sprite\\'s X-coordinate to a specific value, placing it at a precise horizontal position. example: standalone use: set x to (0)\\n - Opcode: motion_changeyby, functionality: Changes the sprite\\'s Y-coordinate by the specified amount, moving it vertically. example: standalone use: change y by (10)\\n - Opcode: motion_sety, functionality: Sets the sprite\\'s Y-coordinate to a specific value, placing it at a precise vertical position. example: standalone use: set y to (0)\\n - Opcode: motion_ifonedgebounce, functionality: Reverses the sprite\\'s direction if it touches the edge of the stage. example: standalone use: if on edge, bounce\\n - Opcode: motion_setrotationstyle, functionality: Determines how the sprite rotates: \\'left-right\\' (flips horizontally), \\'don\\'t rotate\\' (stays facing one direction), or \\'all around\\' (rotates freely). example: standalone use: set rotation style [left-right v]\\n - Opcode: looks_sayforsecs, functionality: Displays a speech bubble containing specified text for a set duration. example: standalone use: say [Hello!] for (2) seconds\\n - Opcode: looks_say, functionality: Displays a speech bubble with the specified text indefinitely until another \\'say\\' or \\'think\\' block is activated. example: standalone use: say [Hello! v]\\n - Opcode: looks_thinkforsecs, functionality: Displays a thought bubble containing specified text for a set duration. example: standalone use: think [Hmm... v] for (2) seconds\\n - Opcode: looks_think, functionality: Displays a thought bubble with the specified text indefinitely until another \\'say\\' or \\'think\\' block is activated. example: standalone use: think [Got it! v]\\n - Opcode: looks_switchcostumeto, functionality: Alters the sprite\\'s appearance to a designated costume. example: standalone use: switch costume to [costume1 v]\\n - Opcode: looks_nextcostume, functionality: Switches the sprite\\'s costume to the next one in its costume list. If it\\'s the last costume, it cycles back to the first. example: standalone use: next costume\\n - Opcode: looks_switchbackdropto, functionality: Changes the stage\\'s backdrop to a specified backdrop. example: standalone use: switch backdrop to [backdrop1 v]\\n - Opcode: looks_switchbackdroptowait, functionality: Changes the stage\\'s backdrop to a specified backdrop and pauses the script until any \\'When backdrop switches to\\' scripts for that backdrop have finished. example: standalone use: switch backdrop to [game over v] and wait\\n - Opcode: looks_nextbackdrop, functionality: Switches the stage\\'s backdrop to the next one in its backdrop list. If it\\'s the last backdrop, it cycles back to the first. 
example: standalone use: next backdrop\\n - Opcode: looks_changesizeby, functionality: Changes the sprite\\'s size by a specified percentage. Positive values make it larger, negative values make it smaller. example: standalone use: change size by (10)\\n - Opcode: looks_setsizeto, functionality: Sets the sprite\\'s size to a specific percentage of its original size. example: standalone use: set size to (100)\\n - Opcode: looks_changeeffectby, functionality: Changes a visual effect on the sprite by a specified amount (e.g., color, fisheye, whirl, pixelate, mosaic, brightness, ghost). example: standalone use: change [color v] effect by (25)\\n - Opcode: looks_seteffectto, functionality: Sets a visual effect on the sprite to a specific value. example: standalone use: set [ghost v] effect to (50)\\n - Opcode: looks_cleargraphiceffects, functionality: Removes all visual effects applied to the sprite. example: standalone use: clear graphic effects\\n - Opcode: looks_show, functionality: Makes the sprite visible on the stage. example: standalone use: show\\n - Opcode: looks_hide, functionality: Makes the sprite invisible on the stage. example: standalone use: hide\\n - Opcode: looks_gotofrontback, functionality: Moves the sprite to the front-most or back-most layer of other sprites on the stage. example: standalone use: go to [front v] layer\\n - Opcode: looks_goforwardbackwardlayers, functionality: Moves the sprite forward or backward a specified number of layers in relation to other sprites. example: standalone use: go [forward v] (1) layers\\n - Opcode: sound_playuntildone, functionality: Plays a specified sound and pauses the script\\'s execution until the sound has completed. example: standalone use: play sound [Meow v] until done\\n - Opcode: sound_play, functionality: Initiates playback of a specified sound without pausing the script, allowing other actions to proceed concurrently. example: standalone use: start sound [Pop v]\\n - Opcode: sound_stopallsounds, functionality: Stops all currently playing sounds. example: standalone use: stop all sounds\\n - Opcode: sound_changevolumeby, functionality: Changes the project\\'s sound volume by a specified amount. example: standalone use: change volume by (-10)\\n - Opcode: sound_setvolumeto, functionality: Sets the sound volume to a specific percentage (0-100). example: standalone use: set volume to (100) %\\n - Opcode: event_broadcast, functionality: Sends a broadcast message throughout the Scratch program, activating any \\'when I receive ()\\' blocks that are set to listen for that message, enabling indirect communication. example: standalone use: broadcast [start game v]\\n - Opcode: event_broadcastandwait, functionality: Sends a broadcast message and pauses the current script until all other scripts activated by that broadcast have completed their execution, ensuring sequential coordination. example: standalone use: broadcast [initialize sprites v] and wait\\n - Opcode: control_wait, functionality: Pauses the script for a specified duration. example: standalone use: wait (1) seconds\\n - Opcode: control_wait_until, functionality: Pauses the script until the specified boolean condition becomes true. [NOTE: it takes boolean blocks as input] example: standalone use: wait until \\n - Opcode: control_stop, functionality: Stops all scripts, this script, or other scripts in the sprite. Becomes a Cap Block if \\'all\\' or \\'this script\\' is selected in the dropdown menu. 
example: standalone use: stop [all v]\\n - Opcode: control_create_clone_of, functionality: Generates a copy, or clone, of a specified sprite (or \\'myself\\' for the current sprite). example: standalone use: create clone of [myself v]\\n - Opcode: control_delete_this_clone, functionality: Deletes the clone that is currently running the script. example: standalone use: delete this clone\\n - Opcode: data_setvariableto, functionality: Assigns a specific value (number, string, or boolean) to a variable. example: standalone use: set [score v] to (0)\\n - Opcode: data_changevariableby, functionality: Increases or decreases a variable\\'s numerical value by a specified amount. example: standalone use: change [score v] by (1)\\n - Opcode: data_addtolist, functionality: Appends an item to the end of a list. example: standalone use: add [apple] to [shopping list v]\\n - Opcode: data_deleteoflist, functionality: Removes an item from a list by its index or by selecting \\'all\\' items. example: standalone use: delete (1) of [my list v]\\n - Opcode: data_insertatlist, functionality: Inserts an item at a specific position within a list. example: standalone use: insert [orange] at (2) of [fruits v]\\n - Opcode: data_replaceitemoflist, functionality: Replaces an item at a specific position in a list with a new value. example: standalone use: replace item (1) of [colors v] with [blue]\\n - Opcode: data_showvariable, functionality: Makes a variable\\'s monitor visible on the stage. example: standalone use: show variable [score v]\\n - Opcode: data_hidevariable, functionality: Hides a variable\\'s monitor from the stage. example: standalone use: hide variable [score v]\\n - Opcode: data_showlist, functionality: Makes a list\\'s monitor visible on the stage. example: standalone use: show list [shopping list v]\\n - Opcode: data_hidelist, functionality: Hides a list\\'s monitor from the stage. example: standalone use: hide list [shopping list v]\\n - Opcode: sensing_askandwait, functionality: Displays an input box with specified text at the bottom of the screen, allowing users to input text, which is stored in the \\'Answer\\' block. example: standalone use: ask [What is your name? v] and wait\\n - Opcode: sensing_resettimer, functionality: Sets the timer’s value back to 0.0. example: standalone use: reset timer\\n - Opcode: sensing_setdragmode, functionality: Sets whether the sprite can be dragged by the mouse on the stage. example: standalone use: set drag mode [draggable v]\\n - Opcode: procedures_call, functionality: Executes the script defined by a corresponding \\'define\\' Hat block. This block allows users to call and reuse custom code sequences by simply dragging and dropping it into their scripts, optionally providing required input values. example: standalone use: jump (50)\\n -----------------------------------\\n\\n Your task is to:\\n If you don\\'t find any \"Code-Blocks\" then,\\n **Don\\'t generate Pseudo Code, and pass the message \"No Code-blocks\" \\n If you find any \"Code-Blocks\" then, \\n 1. **Refine the \\'logic\\'**: Make it precise, accurate, and fully aligned with the Game Description. Use Scratch‑consistent verbs and phrasing. **Do NOT** use raw double‑quotes inside the logic string.\\n\\n 2. 
**Structural requirements**:\\n - **Numeric values** `(e.g., 0, 5, 0.2, -130)` **must** be in parentheses: `(0)`, `(5)`, `(0.2)`, `(-130)`.\\n - **AlphaNumeric values** `(e.g., hello, say 5, 4, hi!)` **must** be in parentheses: `(hello)`, `(say 5)`, `(4)`, `(hi!)`.\\n - **Variables** must be in the form `[variable v]` (e.g., `[score v]`), even when used inside expressions two example use `set [score v] to (1)` or `show variable ([speed v])`.\\n - **Dropdown options** must be in the form `[option v]` (e.g., `[Game Start v]`, `[blue sky v]`). example use `when [space v] key pressed`.\\n - **Reporter blocks** used as inputs must be double‑wrapped: `((x position))`, `((y position))`. example use `if <((y position)) = (-130)> then` or `(((x position)) * (1))`.\\n - **Boolean blocks** in conditions must be inside `< >`, including nested ones: `>`, `< and >`,`< or >`.\\n - **Other Boolean blocks** in conditions must be inside `< >`, including nested ones or values or variables: `<(block/value/variable) * (block/value/variable)>`,`<(block/value/variable) < (block/value/variable)>`, and example of another variable`<[apple v] contains [a v]?>`.\\n - **Operator expressions** must use explicit Scratch operator blocks, e.g.:\\n ```\\n (([ballSpeed v]) * (1.1))\\n ```\\n - **Every hat block script must end** with a final `end` on its own line.\\n\\n 3. **Pseudo‑code formatting**:\\n - Represent each block or nested block on its own line.\\n - **Indent nested blocks by 4 spaces under their parent (`forever`, `if`, etc.).This is a critical requirement.**\\n - No comments or explanatory text—just the block sequence.\\n - a natural language breakdown of each step taken after the event, formatted as a multi-line string representing pseudo-code. Ensure clarity and granularity—each described action should map closely to a Scratch block or tight sequence. \\n\\n 4. **Logic content**:\\n - Build clear flow for mechanics (movement, jumping, flying, scoring, collisions).\\n - Match each action closely to a Scratch block or tight sequence.\\n - Do **NOT** include any justification or comments—only the raw logic.\\n\\n 5. **Examples for reference**: \\n **Correct** pattern for a simple start script:\\n ```\\n when green flag clicked\\n switch backdrop to [blue sky v]\\n set [score v] to (0)\\n show variable [score v]\\n broadcast [Game Start v]\\n end\\n ```\\n **Correct** pattern for updating the high score variable handling:\\n ```\\n when I receive [Game Over v]\\n if <((score)) > (([High Score v]))> then\\n set [High Score v] to ([score v])\\n end\\n switch backdrop to [Game Over v]\\n end\\n ```\\n **Correct** pattern for level up and increase difficulty use:\\n ```\\n when I receive [Level Up v]\\n change [level v] by (1)\\n set [ballSpeed v] to ((([ballSpeed v]) * (1.1)))\\n end\\n ```\\n **Correct** pattern for jumping mechanics use:\\n ```\\n when [space v] key pressed\\n if <((y position)) = (-100)> then\\n repeat (5)\\n change y by (100)\\n wait (0.1) seconds\\n change y by (-100)\\n wait (0.1) seconds\\n end\\n end\\n end\\n ```\\n **Correct** pattern for continuos moving objects use:\\n ```\\n when green flag clicked\\n go to x: (240) y: (-100)\\n set [speed v] to (-5)\\n show variable [speed v]\\n forever\\n change x by ([speed v])\\n if <((x position)) < (-240)> then\\n go to x: (240) y: (-100) \\n end\\n end\\n end\\n ```\\n 6. **Donot** add any explaination of logic or comments to justify or explain just put the logic content in the json.\\n 7. 
**Output**: \\n Return **only** a JSON object, using double quotes everywhere:\\n ```json\\n {\\n \"refined_logic\":{\\n \"name_variable\": \\'Value of \"Sript for: \"\\',\\n \"pseudocode\":\"…your fully‑formatted pseudo‑code here…\",\\n }\\n }\\n ```\\n '}, {'type': 'image_url', 'image_url': {'url': ('', '')}}]}], 'model': 'meta-llama/llama-4-scout-17b-16e-instruct', 'n': 1, 'reasoning_format': None, 'stop': None, 'stream': False, 'temperature': 1e-08}}\n", + "2025-08-11 19:08:15,556 [DEBUG] Sending HTTP Request: POST https://api.groq.com/openai/v1/chat/completions\n", + "2025-08-11 19:08:15,557 [DEBUG] close.started\n", + "2025-08-11 19:08:15,558 [DEBUG] close.complete\n", + "2025-08-11 19:08:15,560 [DEBUG] connect_tcp.started host='api.groq.com' port=443 local_address=None timeout=None socket_options=None\n", + "2025-08-11 19:08:15,636 [DEBUG] connect_tcp.complete return_value=\n", + "2025-08-11 19:08:15,637 [DEBUG] start_tls.started ssl_context= server_hostname='api.groq.com' timeout=None\n", + "2025-08-11 19:08:15,673 [DEBUG] start_tls.complete return_value=\n", + "2025-08-11 19:08:15,673 [DEBUG] send_request_headers.started request=\n", + "2025-08-11 19:08:15,674 [DEBUG] send_request_headers.complete\n", + "2025-08-11 19:08:15,676 [DEBUG] send_request_body.started request=\n", + "2025-08-11 19:08:15,676 [DEBUG] send_request_body.complete\n", + "2025-08-11 19:08:15,677 [DEBUG] receive_response_headers.started request=\n", + "2025-08-11 19:08:15,781 [DEBUG] receive_response_headers.complete return_value=(b'HTTP/1.1', 400, b'Bad Request', [(b'Date', b'Mon, 11 Aug 2025 13:38:15 GMT'), (b'Content-Type', b'application/json'), (b'Content-Length', b'449'), (b'Connection', b'keep-alive'), (b'CF-RAY', b'96d821e10be69a6f-NAG'), (b'Cache-Control', b'private, max-age=0, no-store, no-cache, must-revalidate'), (b'vary', b'Origin'), (b'x-groq-region', b'bom'), (b'x-request-id', b'req_01k2cnmt70eqvae0qzc3p23gzm'), (b'via', b'1.1 google'), (b'cf-cache-status', b'DYNAMIC'), (b'Server', b'cloudflare'), (b'alt-svc', b'h3=\":443\"; ma=86400')])\n", + "2025-08-11 19:08:15,785 [INFO] HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 400 Bad Request\"\n", + "2025-08-11 19:08:15,786 [DEBUG] receive_response_body.started request=\n", + "2025-08-11 19:08:15,788 [DEBUG] receive_response_body.complete\n", + "2025-08-11 19:08:15,790 [DEBUG] response_closed.started\n", + "2025-08-11 19:08:15,792 [DEBUG] response_closed.complete\n", + "2025-08-11 19:08:15,793 [DEBUG] HTTP Response: POST https://api.groq.com/openai/v1/chat/completions \"400 Bad Request\" Headers({'date': 'Mon, 11 Aug 2025 13:38:15 GMT', 'content-type': 'application/json', 'content-length': '449', 'connection': 'keep-alive', 'cf-ray': '96d821e10be69a6f-NAG', 'cache-control': 'private, max-age=0, no-store, no-cache, must-revalidate', 'vary': 'Origin', 'x-groq-region': 'bom', 'x-request-id': 'req_01k2cnmt70eqvae0qzc3p23gzm', 'via': '1.1 google', 'cf-cache-status': 'DYNAMIC', 'server': 'cloudflare', 'alt-svc': 'h3=\":443\"; ma=86400'})\n", + "2025-08-11 19:08:15,793 [DEBUG] Encountered httpx.HTTPStatusError\n", + "Traceback (most recent call last):\n", + " File \"c:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\groq\\_base_client.py\", line 1014, in request\n", + " response.raise_for_status()\n", + " File \"c:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\httpx\\_models.py\", line 829, in raise_for_status\n", + " raise HTTPStatusError(message, request=request, response=self)\n", + 
"httpx.HTTPStatusError: Client error '400 Bad Request' for url 'https://api.groq.com/openai/v1/chat/completions'\n", + "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/400\n", + "2025-08-11 19:08:15,794 [DEBUG] Not retrying\n", + "2025-08-11 19:08:15,795 [DEBUG] Re-raising status error\n" + ] + }, + { + "ename": "BadRequestError", + "evalue": "Error code: 400 - {'error': {'message': \"'messages.1' : for 'role:user' the following must be satisfied[('messages.1.content' : one of the following must be satisfied[('messages.1.content' : value must be a string) OR ('messages.1.content.1' : one of the following must be satisfied[('messages.1.content.1.type' : value is not one of the allowed values ['text']) OR ('messages.1.content.1.image_url.url' : value must be a string)])])]\", 'type': 'invalid_request_error'}}", + "output_type": "error", + "traceback": [ + "\u001b[31m---------------------------------------------------------------------------\u001b[39m", + "\u001b[31mBadRequestError\u001b[39m Traceback (most recent call last)", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[81]\u001b[39m\u001b[32m, line 291\u001b[39m\n\u001b[32m 273\u001b[39m app_graph = workflow.compile()\n\u001b[32m 275\u001b[39m initial_state_dict = {\n\u001b[32m 276\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mproject_json\u001b[39m\u001b[33m\"\u001b[39m: \u001b[33m\"\u001b[39m\u001b[33mproject_skeleton\u001b[39m\u001b[33m\"\u001b[39m,\n\u001b[32m 277\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mdescription\u001b[39m\u001b[33m\"\u001b[39m: \u001b[33m\"\u001b[39m\u001b[33mThe pseudo code for the script\u001b[39m\u001b[33m\"\u001b[39m,\n\u001b[32m (...)\u001b[39m\u001b[32m 287\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mpseudo_code\u001b[39m\u001b[33m\"\u001b[39m:[]\n\u001b[32m 288\u001b[39m }\n\u001b[32m--> \u001b[39m\u001b[32m291\u001b[39m final_state_dict = \u001b[43mapp_graph\u001b[49m\u001b[43m.\u001b[49m\u001b[43minvoke\u001b[49m\u001b[43m(\u001b[49m\u001b[43minitial_state_dict\u001b[49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Pass dictionary\u001b[39;00m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langgraph\\pregel\\__init__.py:2719\u001b[39m, in \u001b[36mPregel.invoke\u001b[39m\u001b[34m(self, input, config, stream_mode, output_keys, interrupt_before, interrupt_after, checkpoint_during, debug, **kwargs)\u001b[39m\n\u001b[32m 2716\u001b[39m chunks: \u001b[38;5;28mlist\u001b[39m[Union[\u001b[38;5;28mdict\u001b[39m[\u001b[38;5;28mstr\u001b[39m, Any], Any]] = []\n\u001b[32m 2717\u001b[39m interrupts: \u001b[38;5;28mlist\u001b[39m[Interrupt] = []\n\u001b[32m-> \u001b[39m\u001b[32m2719\u001b[39m \u001b[43m\u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mchunk\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mstream\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 2720\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[32m 2721\u001b[39m \u001b[43m \u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2722\u001b[39m \u001b[43m \u001b[49m\u001b[43mstream_mode\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstream_mode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2723\u001b[39m \u001b[43m \u001b[49m\u001b[43moutput_keys\u001b[49m\u001b[43m=\u001b[49m\u001b[43moutput_keys\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2724\u001b[39m 
\u001b[43m \u001b[49m\u001b[43minterrupt_before\u001b[49m\u001b[43m=\u001b[49m\u001b[43minterrupt_before\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2725\u001b[39m \u001b[43m \u001b[49m\u001b[43minterrupt_after\u001b[49m\u001b[43m=\u001b[49m\u001b[43minterrupt_after\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2726\u001b[39m \u001b[43m \u001b[49m\u001b[43mcheckpoint_during\u001b[49m\u001b[43m=\u001b[49m\u001b[43mcheckpoint_during\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2727\u001b[39m \u001b[43m \u001b[49m\u001b[43mdebug\u001b[49m\u001b[43m=\u001b[49m\u001b[43mdebug\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2728\u001b[39m \u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2729\u001b[39m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\u001b[43m:\u001b[49m\n\u001b[32m 2730\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mstream_mode\u001b[49m\u001b[43m \u001b[49m\u001b[43m==\u001b[49m\u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mvalues\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\n\u001b[32m 2731\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 2732\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;28;43misinstance\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mchunk\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mdict\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[32m 2733\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;129;43;01mand\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43m(\u001b[49m\u001b[43mints\u001b[49m\u001b[43m \u001b[49m\u001b[43m:=\u001b[49m\u001b[43m \u001b[49m\u001b[43mchunk\u001b[49m\u001b[43m.\u001b[49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[43mINTERRUPT\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mis\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mnot\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\n\u001b[32m 2734\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\u001b[43m:\u001b[49m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langgraph\\pregel\\__init__.py:2436\u001b[39m, in \u001b[36mPregel.stream\u001b[39m\u001b[34m(self, input, config, stream_mode, output_keys, interrupt_before, interrupt_after, checkpoint_during, debug, subgraphs)\u001b[39m\n\u001b[32m 2434\u001b[39m \u001b[38;5;28;01mfor\u001b[39;00m task \u001b[38;5;129;01min\u001b[39;00m loop.match_cached_writes():\n\u001b[32m 2435\u001b[39m loop.output_writes(task.id, task.writes, cached=\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[32m-> \u001b[39m\u001b[32m2436\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43m_\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mrunner\u001b[49m\u001b[43m.\u001b[49m\u001b[43mtick\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 2437\u001b[39m \u001b[43m \u001b[49m\u001b[43m[\u001b[49m\u001b[43mt\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mt\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mloop\u001b[49m\u001b[43m.\u001b[49m\u001b[43mtasks\u001b[49m\u001b[43m.\u001b[49m\u001b[43mvalues\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m 
\u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mnot\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mt\u001b[49m\u001b[43m.\u001b[49m\u001b[43mwrites\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2438\u001b[39m \u001b[43m \u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mstep_timeout\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2439\u001b[39m \u001b[43m \u001b[49m\u001b[43mget_waiter\u001b[49m\u001b[43m=\u001b[49m\u001b[43mget_waiter\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2440\u001b[39m \u001b[43m \u001b[49m\u001b[43mschedule_task\u001b[49m\u001b[43m=\u001b[49m\u001b[43mloop\u001b[49m\u001b[43m.\u001b[49m\u001b[43maccept_push\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2441\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\u001b[43m:\u001b[49m\n\u001b[32m 2442\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# emit output\u001b[39;49;00m\n\u001b[32m 2443\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;28;43;01myield from\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43moutput\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 2444\u001b[39m \u001b[38;5;66;03m# emit output\u001b[39;00m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langgraph\\pregel\\runner.py:162\u001b[39m, in \u001b[36mPregelRunner.tick\u001b[39m\u001b[34m(self, tasks, reraise, timeout, retry_policy, get_waiter, schedule_task)\u001b[39m\n\u001b[32m 160\u001b[39m t = tasks[\u001b[32m0\u001b[39m]\n\u001b[32m 161\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m--> \u001b[39m\u001b[32m162\u001b[39m \u001b[43mrun_with_retry\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 163\u001b[39m \u001b[43m \u001b[49m\u001b[43mt\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 164\u001b[39m \u001b[43m \u001b[49m\u001b[43mretry_policy\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 165\u001b[39m \u001b[43m \u001b[49m\u001b[43mconfigurable\u001b[49m\u001b[43m=\u001b[49m\u001b[43m{\u001b[49m\n\u001b[32m 166\u001b[39m \u001b[43m \u001b[49m\u001b[43mCONFIG_KEY_CALL\u001b[49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mpartial\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 167\u001b[39m \u001b[43m \u001b[49m\u001b[43m_call\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 168\u001b[39m \u001b[43m \u001b[49m\u001b[43mweakref\u001b[49m\u001b[43m.\u001b[49m\u001b[43mref\u001b[49m\u001b[43m(\u001b[49m\u001b[43mt\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 169\u001b[39m \u001b[43m \u001b[49m\u001b[43mretry\u001b[49m\u001b[43m=\u001b[49m\u001b[43mretry_policy\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 170\u001b[39m \u001b[43m \u001b[49m\u001b[43mfutures\u001b[49m\u001b[43m=\u001b[49m\u001b[43mweakref\u001b[49m\u001b[43m.\u001b[49m\u001b[43mref\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfutures\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 171\u001b[39m \u001b[43m \u001b[49m\u001b[43mschedule_task\u001b[49m\u001b[43m=\u001b[49m\u001b[43mschedule_task\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 172\u001b[39m \u001b[43m \u001b[49m\u001b[43msubmit\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43msubmit\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 173\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 174\u001b[39m \u001b[43m \u001b[49m\u001b[43m}\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 175\u001b[39m \u001b[43m 
\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 176\u001b[39m \u001b[38;5;28mself\u001b[39m.commit(t, \u001b[38;5;28;01mNone\u001b[39;00m)\n\u001b[32m 177\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m exc:\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langgraph\\pregel\\retry.py:40\u001b[39m, in \u001b[36mrun_with_retry\u001b[39m\u001b[34m(task, retry_policy, configurable)\u001b[39m\n\u001b[32m 38\u001b[39m task.writes.clear()\n\u001b[32m 39\u001b[39m \u001b[38;5;66;03m# run the task\u001b[39;00m\n\u001b[32m---> \u001b[39m\u001b[32m40\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mtask\u001b[49m\u001b[43m.\u001b[49m\u001b[43mproc\u001b[49m\u001b[43m.\u001b[49m\u001b[43minvoke\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtask\u001b[49m\u001b[43m.\u001b[49m\u001b[43minput\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 41\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m ParentCommand \u001b[38;5;28;01mas\u001b[39;00m exc:\n\u001b[32m 42\u001b[39m ns: \u001b[38;5;28mstr\u001b[39m = config[CONF][CONFIG_KEY_CHECKPOINT_NS]\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langgraph\\utils\\runnable.py:623\u001b[39m, in \u001b[36mRunnableSeq.invoke\u001b[39m\u001b[34m(self, input, config, **kwargs)\u001b[39m\n\u001b[32m 621\u001b[39m \u001b[38;5;66;03m# run in context\u001b[39;00m\n\u001b[32m 622\u001b[39m \u001b[38;5;28;01mwith\u001b[39;00m set_config_context(config, run) \u001b[38;5;28;01mas\u001b[39;00m context:\n\u001b[32m--> \u001b[39m\u001b[32m623\u001b[39m \u001b[38;5;28minput\u001b[39m = \u001b[43mcontext\u001b[49m\u001b[43m.\u001b[49m\u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\u001b[43mstep\u001b[49m\u001b[43m.\u001b[49m\u001b[43minvoke\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 624\u001b[39m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[32m 625\u001b[39m \u001b[38;5;28minput\u001b[39m = step.invoke(\u001b[38;5;28minput\u001b[39m, config)\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langgraph\\utils\\runnable.py:377\u001b[39m, in \u001b[36mRunnableCallable.invoke\u001b[39m\u001b[34m(self, input, config, **kwargs)\u001b[39m\n\u001b[32m 375\u001b[39m run_manager.on_chain_end(ret)\n\u001b[32m 376\u001b[39m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[32m--> \u001b[39m\u001b[32m377\u001b[39m ret = \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 378\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m.recurse \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(ret, Runnable):\n\u001b[32m 379\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m ret.invoke(\u001b[38;5;28minput\u001b[39m, config)\n", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[81]\u001b[39m\u001b[32m, line 201\u001b[39m, in 
\u001b[36mpseudo_generator_node\u001b[39m\u001b[34m(state)\u001b[39m\n\u001b[32m 195\u001b[39m content = [\n\u001b[32m 196\u001b[39m {\u001b[33m\"\u001b[39m\u001b[33mtype\u001b[39m\u001b[33m\"\u001b[39m: \u001b[33m\"\u001b[39m\u001b[33mtext\u001b[39m\u001b[33m\"\u001b[39m, \u001b[33m\"\u001b[39m\u001b[33mtext\u001b[39m\u001b[33m\"\u001b[39m: refinement_prompt},\n\u001b[32m 197\u001b[39m image_input\n\u001b[32m 198\u001b[39m ]\n\u001b[32m 199\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m 200\u001b[39m \u001b[38;5;66;03m#Invoke the main agent for logic refinement and relationship identification\u001b[39;00m\n\u001b[32m--> \u001b[39m\u001b[32m201\u001b[39m response = \u001b[43magent\u001b[49m\u001b[43m.\u001b[49m\u001b[43minvoke\u001b[49m\u001b[43m(\u001b[49m\u001b[43m{\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mmessages\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43m[\u001b[49m\u001b[43m{\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mrole\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43muser\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mcontent\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mcontent\u001b[49m\u001b[43m}\u001b[49m\u001b[43m]\u001b[49m\u001b[43m}\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 202\u001b[39m llm_output_raw = response[\u001b[33m\"\u001b[39m\u001b[33mmessages\u001b[39m\u001b[33m\"\u001b[39m][-\u001b[32m1\u001b[39m].content.strip()\n\u001b[32m 203\u001b[39m \u001b[38;5;66;03m#print(f\"llm_output_raw: {response}\")\u001b[39;00m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langgraph\\pregel\\__init__.py:2719\u001b[39m, in \u001b[36mPregel.invoke\u001b[39m\u001b[34m(self, input, config, stream_mode, output_keys, interrupt_before, interrupt_after, checkpoint_during, debug, **kwargs)\u001b[39m\n\u001b[32m 2716\u001b[39m chunks: \u001b[38;5;28mlist\u001b[39m[Union[\u001b[38;5;28mdict\u001b[39m[\u001b[38;5;28mstr\u001b[39m, Any], Any]] = []\n\u001b[32m 2717\u001b[39m interrupts: \u001b[38;5;28mlist\u001b[39m[Interrupt] = []\n\u001b[32m-> \u001b[39m\u001b[32m2719\u001b[39m \u001b[43m\u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mchunk\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mstream\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 2720\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[32m 2721\u001b[39m \u001b[43m \u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2722\u001b[39m \u001b[43m \u001b[49m\u001b[43mstream_mode\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstream_mode\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2723\u001b[39m \u001b[43m \u001b[49m\u001b[43moutput_keys\u001b[49m\u001b[43m=\u001b[49m\u001b[43moutput_keys\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2724\u001b[39m \u001b[43m \u001b[49m\u001b[43minterrupt_before\u001b[49m\u001b[43m=\u001b[49m\u001b[43minterrupt_before\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2725\u001b[39m \u001b[43m \u001b[49m\u001b[43minterrupt_after\u001b[49m\u001b[43m=\u001b[49m\u001b[43minterrupt_after\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2726\u001b[39m \u001b[43m 
\u001b[49m\u001b[43mcheckpoint_during\u001b[49m\u001b[43m=\u001b[49m\u001b[43mcheckpoint_during\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2727\u001b[39m \u001b[43m \u001b[49m\u001b[43mdebug\u001b[49m\u001b[43m=\u001b[49m\u001b[43mdebug\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2728\u001b[39m \u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2729\u001b[39m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\u001b[43m:\u001b[49m\n\u001b[32m 2730\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mstream_mode\u001b[49m\u001b[43m \u001b[49m\u001b[43m==\u001b[49m\u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mvalues\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\n\u001b[32m 2731\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 2732\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;28;43misinstance\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mchunk\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mdict\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[32m 2733\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;129;43;01mand\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43m(\u001b[49m\u001b[43mints\u001b[49m\u001b[43m \u001b[49m\u001b[43m:=\u001b[49m\u001b[43m \u001b[49m\u001b[43mchunk\u001b[49m\u001b[43m.\u001b[49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[43mINTERRUPT\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mis\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mnot\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\n\u001b[32m 2734\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\u001b[43m:\u001b[49m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langgraph\\pregel\\__init__.py:2436\u001b[39m, in \u001b[36mPregel.stream\u001b[39m\u001b[34m(self, input, config, stream_mode, output_keys, interrupt_before, interrupt_after, checkpoint_during, debug, subgraphs)\u001b[39m\n\u001b[32m 2434\u001b[39m \u001b[38;5;28;01mfor\u001b[39;00m task \u001b[38;5;129;01min\u001b[39;00m loop.match_cached_writes():\n\u001b[32m 2435\u001b[39m loop.output_writes(task.id, task.writes, cached=\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[32m-> \u001b[39m\u001b[32m2436\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43m_\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mrunner\u001b[49m\u001b[43m.\u001b[49m\u001b[43mtick\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 2437\u001b[39m \u001b[43m \u001b[49m\u001b[43m[\u001b[49m\u001b[43mt\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mt\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mloop\u001b[49m\u001b[43m.\u001b[49m\u001b[43mtasks\u001b[49m\u001b[43m.\u001b[49m\u001b[43mvalues\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mnot\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mt\u001b[49m\u001b[43m.\u001b[49m\u001b[43mwrites\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2438\u001b[39m \u001b[43m 
\u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mstep_timeout\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2439\u001b[39m \u001b[43m \u001b[49m\u001b[43mget_waiter\u001b[49m\u001b[43m=\u001b[49m\u001b[43mget_waiter\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2440\u001b[39m \u001b[43m \u001b[49m\u001b[43mschedule_task\u001b[49m\u001b[43m=\u001b[49m\u001b[43mloop\u001b[49m\u001b[43m.\u001b[49m\u001b[43maccept_push\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 2441\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\u001b[43m:\u001b[49m\n\u001b[32m 2442\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# emit output\u001b[39;49;00m\n\u001b[32m 2443\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;28;43;01myield from\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43moutput\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 2444\u001b[39m \u001b[38;5;66;03m# emit output\u001b[39;00m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langgraph\\pregel\\runner.py:162\u001b[39m, in \u001b[36mPregelRunner.tick\u001b[39m\u001b[34m(self, tasks, reraise, timeout, retry_policy, get_waiter, schedule_task)\u001b[39m\n\u001b[32m 160\u001b[39m t = tasks[\u001b[32m0\u001b[39m]\n\u001b[32m 161\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m--> \u001b[39m\u001b[32m162\u001b[39m \u001b[43mrun_with_retry\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 163\u001b[39m \u001b[43m \u001b[49m\u001b[43mt\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 164\u001b[39m \u001b[43m \u001b[49m\u001b[43mretry_policy\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 165\u001b[39m \u001b[43m \u001b[49m\u001b[43mconfigurable\u001b[49m\u001b[43m=\u001b[49m\u001b[43m{\u001b[49m\n\u001b[32m 166\u001b[39m \u001b[43m \u001b[49m\u001b[43mCONFIG_KEY_CALL\u001b[49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mpartial\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 167\u001b[39m \u001b[43m \u001b[49m\u001b[43m_call\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 168\u001b[39m \u001b[43m \u001b[49m\u001b[43mweakref\u001b[49m\u001b[43m.\u001b[49m\u001b[43mref\u001b[49m\u001b[43m(\u001b[49m\u001b[43mt\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 169\u001b[39m \u001b[43m \u001b[49m\u001b[43mretry\u001b[49m\u001b[43m=\u001b[49m\u001b[43mretry_policy\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 170\u001b[39m \u001b[43m \u001b[49m\u001b[43mfutures\u001b[49m\u001b[43m=\u001b[49m\u001b[43mweakref\u001b[49m\u001b[43m.\u001b[49m\u001b[43mref\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfutures\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 171\u001b[39m \u001b[43m \u001b[49m\u001b[43mschedule_task\u001b[49m\u001b[43m=\u001b[49m\u001b[43mschedule_task\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 172\u001b[39m \u001b[43m \u001b[49m\u001b[43msubmit\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43msubmit\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 173\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 174\u001b[39m \u001b[43m \u001b[49m\u001b[43m}\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 175\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 176\u001b[39m \u001b[38;5;28mself\u001b[39m.commit(t, \u001b[38;5;28;01mNone\u001b[39;00m)\n\u001b[32m 177\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m exc:\n", + "\u001b[36mFile 
\u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langgraph\\pregel\\retry.py:40\u001b[39m, in \u001b[36mrun_with_retry\u001b[39m\u001b[34m(task, retry_policy, configurable)\u001b[39m\n\u001b[32m 38\u001b[39m task.writes.clear()\n\u001b[32m 39\u001b[39m \u001b[38;5;66;03m# run the task\u001b[39;00m\n\u001b[32m---> \u001b[39m\u001b[32m40\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mtask\u001b[49m\u001b[43m.\u001b[49m\u001b[43mproc\u001b[49m\u001b[43m.\u001b[49m\u001b[43minvoke\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtask\u001b[49m\u001b[43m.\u001b[49m\u001b[43minput\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 41\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m ParentCommand \u001b[38;5;28;01mas\u001b[39;00m exc:\n\u001b[32m 42\u001b[39m ns: \u001b[38;5;28mstr\u001b[39m = config[CONF][CONFIG_KEY_CHECKPOINT_NS]\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langgraph\\utils\\runnable.py:623\u001b[39m, in \u001b[36mRunnableSeq.invoke\u001b[39m\u001b[34m(self, input, config, **kwargs)\u001b[39m\n\u001b[32m 621\u001b[39m \u001b[38;5;66;03m# run in context\u001b[39;00m\n\u001b[32m 622\u001b[39m \u001b[38;5;28;01mwith\u001b[39;00m set_config_context(config, run) \u001b[38;5;28;01mas\u001b[39;00m context:\n\u001b[32m--> \u001b[39m\u001b[32m623\u001b[39m \u001b[38;5;28minput\u001b[39m = \u001b[43mcontext\u001b[49m\u001b[43m.\u001b[49m\u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\u001b[43mstep\u001b[49m\u001b[43m.\u001b[49m\u001b[43minvoke\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 624\u001b[39m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[32m 625\u001b[39m \u001b[38;5;28minput\u001b[39m = step.invoke(\u001b[38;5;28minput\u001b[39m, config)\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langgraph\\utils\\runnable.py:370\u001b[39m, in \u001b[36mRunnableCallable.invoke\u001b[39m\u001b[34m(self, input, config, **kwargs)\u001b[39m\n\u001b[32m 368\u001b[39m \u001b[38;5;66;03m# run in context\u001b[39;00m\n\u001b[32m 369\u001b[39m \u001b[38;5;28;01mwith\u001b[39;00m set_config_context(child_config, run) \u001b[38;5;28;01mas\u001b[39;00m context:\n\u001b[32m--> \u001b[39m\u001b[32m370\u001b[39m ret = \u001b[43mcontext\u001b[49m\u001b[43m.\u001b[49m\u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mfunc\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 371\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mBaseException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[32m 372\u001b[39m run_manager.on_chain_error(e)\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langgraph\\prebuilt\\chat_agent_executor.py:507\u001b[39m, in \u001b[36mcreate_react_agent..call_model\u001b[39m\u001b[34m(state, config)\u001b[39m\n\u001b[32m 505\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m 
\u001b[39m\u001b[34mcall_model\u001b[39m(state: StateSchema, config: RunnableConfig) -> StateSchema:\n\u001b[32m 506\u001b[39m state = _get_model_input_state(state)\n\u001b[32m--> \u001b[39m\u001b[32m507\u001b[39m response = cast(AIMessage, \u001b[43mmodel_runnable\u001b[49m\u001b[43m.\u001b[49m\u001b[43minvoke\u001b[49m\u001b[43m(\u001b[49m\u001b[43mstate\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m)\u001b[49m)\n\u001b[32m 508\u001b[39m \u001b[38;5;66;03m# add agent name to the AIMessage\u001b[39;00m\n\u001b[32m 509\u001b[39m response.name = name\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langchain_core\\runnables\\base.py:3047\u001b[39m, in \u001b[36mRunnableSequence.invoke\u001b[39m\u001b[34m(self, input, config, **kwargs)\u001b[39m\n\u001b[32m 3045\u001b[39m input_ = context.run(step.invoke, input_, config, **kwargs)\n\u001b[32m 3046\u001b[39m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[32m-> \u001b[39m\u001b[32m3047\u001b[39m input_ = \u001b[43mcontext\u001b[49m\u001b[43m.\u001b[49m\u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\u001b[43mstep\u001b[49m\u001b[43m.\u001b[49m\u001b[43minvoke\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43minput_\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 3048\u001b[39m \u001b[38;5;66;03m# finish the root run\u001b[39;00m\n\u001b[32m 3049\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mBaseException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langchain_core\\language_models\\chat_models.py:372\u001b[39m, in \u001b[36mBaseChatModel.invoke\u001b[39m\u001b[34m(self, input, config, stop, **kwargs)\u001b[39m\n\u001b[32m 360\u001b[39m \u001b[38;5;129m@override\u001b[39m\n\u001b[32m 361\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34minvoke\u001b[39m(\n\u001b[32m 362\u001b[39m \u001b[38;5;28mself\u001b[39m,\n\u001b[32m (...)\u001b[39m\u001b[32m 367\u001b[39m **kwargs: Any,\n\u001b[32m 368\u001b[39m ) -> BaseMessage:\n\u001b[32m 369\u001b[39m config = ensure_config(config)\n\u001b[32m 370\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m cast(\n\u001b[32m 371\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mChatGeneration\u001b[39m\u001b[33m\"\u001b[39m,\n\u001b[32m--> \u001b[39m\u001b[32m372\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mgenerate_prompt\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 373\u001b[39m \u001b[43m \u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43m_convert_input\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 374\u001b[39m \u001b[43m \u001b[49m\u001b[43mstop\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstop\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 375\u001b[39m \u001b[43m \u001b[49m\u001b[43mcallbacks\u001b[49m\u001b[43m=\u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m.\u001b[49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mcallbacks\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 376\u001b[39m \u001b[43m 
\u001b[49m\u001b[43mtags\u001b[49m\u001b[43m=\u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m.\u001b[49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mtags\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 377\u001b[39m \u001b[43m \u001b[49m\u001b[43mmetadata\u001b[49m\u001b[43m=\u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m.\u001b[49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mmetadata\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 378\u001b[39m \u001b[43m \u001b[49m\u001b[43mrun_name\u001b[49m\u001b[43m=\u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m.\u001b[49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mrun_name\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 379\u001b[39m \u001b[43m \u001b[49m\u001b[43mrun_id\u001b[49m\u001b[43m=\u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m.\u001b[49m\u001b[43mpop\u001b[49m\u001b[43m(\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mrun_id\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 380\u001b[39m \u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 381\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m.generations[\u001b[32m0\u001b[39m][\u001b[32m0\u001b[39m],\n\u001b[32m 382\u001b[39m ).message\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langchain_core\\language_models\\chat_models.py:957\u001b[39m, in \u001b[36mBaseChatModel.generate_prompt\u001b[39m\u001b[34m(self, prompts, stop, callbacks, **kwargs)\u001b[39m\n\u001b[32m 948\u001b[39m \u001b[38;5;129m@override\u001b[39m\n\u001b[32m 949\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34mgenerate_prompt\u001b[39m(\n\u001b[32m 950\u001b[39m \u001b[38;5;28mself\u001b[39m,\n\u001b[32m (...)\u001b[39m\u001b[32m 954\u001b[39m **kwargs: Any,\n\u001b[32m 955\u001b[39m ) -> LLMResult:\n\u001b[32m 956\u001b[39m prompt_messages = [p.to_messages() \u001b[38;5;28;01mfor\u001b[39;00m p \u001b[38;5;129;01min\u001b[39;00m prompts]\n\u001b[32m--> \u001b[39m\u001b[32m957\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mgenerate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mprompt_messages\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstop\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstop\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcallbacks\u001b[49m\u001b[43m=\u001b[49m\u001b[43mcallbacks\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langchain_core\\language_models\\chat_models.py:776\u001b[39m, in \u001b[36mBaseChatModel.generate\u001b[39m\u001b[34m(self, messages, stop, callbacks, tags, metadata, run_name, run_id, **kwargs)\u001b[39m\n\u001b[32m 773\u001b[39m \u001b[38;5;28;01mfor\u001b[39;00m i, m \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28menumerate\u001b[39m(input_messages):\n\u001b[32m 774\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m 775\u001b[39m 
results.append(\n\u001b[32m--> \u001b[39m\u001b[32m776\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43m_generate_with_cache\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 777\u001b[39m \u001b[43m \u001b[49m\u001b[43mm\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 778\u001b[39m \u001b[43m \u001b[49m\u001b[43mstop\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstop\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 779\u001b[39m \u001b[43m \u001b[49m\u001b[43mrun_manager\u001b[49m\u001b[43m=\u001b[49m\u001b[43mrun_managers\u001b[49m\u001b[43m[\u001b[49m\u001b[43mi\u001b[49m\u001b[43m]\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mrun_managers\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01melse\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[32m 780\u001b[39m \u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 781\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 782\u001b[39m )\n\u001b[32m 783\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mBaseException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[32m 784\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m run_managers:\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langchain_core\\language_models\\chat_models.py:1022\u001b[39m, in \u001b[36mBaseChatModel._generate_with_cache\u001b[39m\u001b[34m(self, messages, stop, run_manager, **kwargs)\u001b[39m\n\u001b[32m 1020\u001b[39m result = generate_from_stream(\u001b[38;5;28miter\u001b[39m(chunks))\n\u001b[32m 1021\u001b[39m \u001b[38;5;28;01melif\u001b[39;00m inspect.signature(\u001b[38;5;28mself\u001b[39m._generate).parameters.get(\u001b[33m\"\u001b[39m\u001b[33mrun_manager\u001b[39m\u001b[33m\"\u001b[39m):\n\u001b[32m-> \u001b[39m\u001b[32m1022\u001b[39m result = \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43m_generate\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 1023\u001b[39m \u001b[43m \u001b[49m\u001b[43mmessages\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstop\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstop\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrun_manager\u001b[49m\u001b[43m=\u001b[49m\u001b[43mrun_manager\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\n\u001b[32m 1024\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1025\u001b[39m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[32m 1026\u001b[39m result = \u001b[38;5;28mself\u001b[39m._generate(messages, stop=stop, **kwargs)\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\langchain_groq\\chat_models.py:504\u001b[39m, in \u001b[36mChatGroq._generate\u001b[39m\u001b[34m(self, messages, stop, run_manager, **kwargs)\u001b[39m\n\u001b[32m 499\u001b[39m message_dicts, params = \u001b[38;5;28mself\u001b[39m._create_message_dicts(messages, stop)\n\u001b[32m 500\u001b[39m params = {\n\u001b[32m 501\u001b[39m **params,\n\u001b[32m 502\u001b[39m **kwargs,\n\u001b[32m 503\u001b[39m }\n\u001b[32m--> \u001b[39m\u001b[32m504\u001b[39m response = 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mclient\u001b[49m\u001b[43m.\u001b[49m\u001b[43mcreate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmessages\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmessage_dicts\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mparams\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 505\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m._create_chat_result(response)\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\groq\\resources\\chat\\completions.py:368\u001b[39m, in \u001b[36mCompletions.create\u001b[39m\u001b[34m(self, messages, model, exclude_domains, frequency_penalty, function_call, functions, include_domains, logit_bias, logprobs, max_completion_tokens, max_tokens, metadata, n, parallel_tool_calls, presence_penalty, reasoning_effort, reasoning_format, response_format, search_settings, seed, service_tier, stop, store, stream, temperature, tool_choice, tools, top_logprobs, top_p, user, extra_headers, extra_query, extra_body, timeout)\u001b[39m\n\u001b[32m 181\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34mcreate\u001b[39m(\n\u001b[32m 182\u001b[39m \u001b[38;5;28mself\u001b[39m,\n\u001b[32m 183\u001b[39m *,\n\u001b[32m (...)\u001b[39m\u001b[32m 229\u001b[39m timeout: \u001b[38;5;28mfloat\u001b[39m | httpx.Timeout | \u001b[38;5;28;01mNone\u001b[39;00m | NotGiven = NOT_GIVEN,\n\u001b[32m 230\u001b[39m ) -> ChatCompletion | Stream[ChatCompletionChunk]:\n\u001b[32m 231\u001b[39m \u001b[38;5;250m \u001b[39m\u001b[33;03m\"\"\"\u001b[39;00m\n\u001b[32m 232\u001b[39m \u001b[33;03m Creates a model response for the given chat conversation.\u001b[39;00m\n\u001b[32m 233\u001b[39m \n\u001b[32m (...)\u001b[39m\u001b[32m 366\u001b[39m \u001b[33;03m timeout: Override the client-level default timeout for this request, in seconds\u001b[39;00m\n\u001b[32m 367\u001b[39m \u001b[33;03m \"\"\"\u001b[39;00m\n\u001b[32m--> \u001b[39m\u001b[32m368\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43m_post\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 369\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43m/openai/v1/chat/completions\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[32m 370\u001b[39m \u001b[43m \u001b[49m\u001b[43mbody\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmaybe_transform\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 371\u001b[39m \u001b[43m \u001b[49m\u001b[43m{\u001b[49m\n\u001b[32m 372\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mmessages\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mmessages\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 373\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mmodel\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 374\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mexclude_domains\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mexclude_domains\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 375\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mfrequency_penalty\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mfrequency_penalty\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 376\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mfunction_call\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mfunction_call\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 377\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mfunctions\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mfunctions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 378\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43minclude_domains\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43minclude_domains\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 379\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mlogit_bias\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mlogit_bias\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 380\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mlogprobs\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mlogprobs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 381\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mmax_completion_tokens\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mmax_completion_tokens\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 382\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mmax_tokens\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mmax_tokens\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 383\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mmetadata\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mmetadata\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 384\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mn\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mn\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 385\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mparallel_tool_calls\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mparallel_tool_calls\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 386\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mpresence_penalty\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mpresence_penalty\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 387\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mreasoning_effort\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mreasoning_effort\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 388\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mreasoning_format\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mreasoning_format\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 389\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mresponse_format\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mresponse_format\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 390\u001b[39m \u001b[43m 
\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43msearch_settings\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43msearch_settings\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 391\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mseed\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mseed\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 392\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mservice_tier\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mservice_tier\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 393\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mstop\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mstop\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 394\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mstore\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mstore\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 395\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mstream\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mstream\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 396\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mtemperature\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mtemperature\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 397\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mtool_choice\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mtool_choice\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 398\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mtools\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mtools\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 399\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mtop_logprobs\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mtop_logprobs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 400\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mtop_p\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mtop_p\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 401\u001b[39m \u001b[43m \u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43muser\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43muser\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 402\u001b[39m \u001b[43m \u001b[49m\u001b[43m}\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 403\u001b[39m \u001b[43m \u001b[49m\u001b[43mcompletion_create_params\u001b[49m\u001b[43m.\u001b[49m\u001b[43mCompletionCreateParams\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 404\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 405\u001b[39m \u001b[43m \u001b[49m\u001b[43moptions\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmake_request_options\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 406\u001b[39m \u001b[43m \u001b[49m\u001b[43mextra_headers\u001b[49m\u001b[43m=\u001b[49m\u001b[43mextra_headers\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mextra_query\u001b[49m\u001b[43m=\u001b[49m\u001b[43mextra_query\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mextra_body\u001b[49m\u001b[43m=\u001b[49m\u001b[43mextra_body\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m=\u001b[49m\u001b[43mtimeout\u001b[49m\n\u001b[32m 407\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 408\u001b[39m \u001b[43m \u001b[49m\u001b[43mcast_to\u001b[49m\u001b[43m=\u001b[49m\u001b[43mChatCompletion\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 409\u001b[39m \u001b[43m \u001b[49m\u001b[43mstream\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstream\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[32m 410\u001b[39m \u001b[43m \u001b[49m\u001b[43mstream_cls\u001b[49m\u001b[43m=\u001b[49m\u001b[43mStream\u001b[49m\u001b[43m[\u001b[49m\u001b[43mChatCompletionChunk\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 411\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\groq\\_base_client.py:1232\u001b[39m, in \u001b[36mSyncAPIClient.post\u001b[39m\u001b[34m(self, path, cast_to, body, options, files, stream, stream_cls)\u001b[39m\n\u001b[32m 1218\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34mpost\u001b[39m(\n\u001b[32m 1219\u001b[39m \u001b[38;5;28mself\u001b[39m,\n\u001b[32m 1220\u001b[39m path: \u001b[38;5;28mstr\u001b[39m,\n\u001b[32m (...)\u001b[39m\u001b[32m 1227\u001b[39m stream_cls: \u001b[38;5;28mtype\u001b[39m[_StreamT] | \u001b[38;5;28;01mNone\u001b[39;00m = \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[32m 1228\u001b[39m ) -> ResponseT | _StreamT:\n\u001b[32m 1229\u001b[39m opts = FinalRequestOptions.construct(\n\u001b[32m 1230\u001b[39m method=\u001b[33m\"\u001b[39m\u001b[33mpost\u001b[39m\u001b[33m\"\u001b[39m, url=path, json_data=body, files=to_httpx_files(files), **options\n\u001b[32m 1231\u001b[39m )\n\u001b[32m-> \u001b[39m\u001b[32m1232\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m cast(ResponseT, \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mrequest\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcast_to\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mopts\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstream\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstream\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstream_cls\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstream_cls\u001b[49m\u001b[43m)\u001b[49m)\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\groq\\_base_client.py:1034\u001b[39m, in \u001b[36mSyncAPIClient.request\u001b[39m\u001b[34m(self, cast_to, options, stream, stream_cls)\u001b[39m\n\u001b[32m 1031\u001b[39m err.response.read()\n\u001b[32m 1033\u001b[39m log.debug(\u001b[33m\"\u001b[39m\u001b[33mRe-raising status error\u001b[39m\u001b[33m\"\u001b[39m)\n\u001b[32m-> \u001b[39m\u001b[32m1034\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;28mself\u001b[39m._make_status_error_from_response(err.response) \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[32m 1036\u001b[39m \u001b[38;5;28;01mbreak\u001b[39;00m\n\u001b[32m 1038\u001b[39m \u001b[38;5;28;01massert\u001b[39;00m response \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m, 
\u001b[33m\"\u001b[39m\u001b[33mcould not resolve response (should never happen)\u001b[39m\u001b[33m\"\u001b[39m\n", + "\u001b[31mBadRequestError\u001b[39m: Error code: 400 - {'error': {'message': \"'messages.1' : for 'role:user' the following must be satisfied[('messages.1.content' : one of the following must be satisfied[('messages.1.content' : value must be a string) OR ('messages.1.content.1' : one of the following must be satisfied[('messages.1.content.1.type' : value is not one of the allowed values ['text']) OR ('messages.1.content.1.image_url.url' : value must be a string)])])]\", 'type': 'invalid_request_error'}}", + "During task with name 'agent' and id 'd3c03718-80b8-3216-bee9-f8866c53324c'", + "During task with name 'pseudo_generator' and id '2f1d9f6f-92c5-7679-5e01-05379a6240c8'" + ] + } + ], + "source": [ + "pdf_file_path = r\"D:\\DEV PATEL\\2025\\Scratch_Vision\\images\\code_blocks_with_script_for_written.pdf\"\n", + "with open(pdf_file_path, \"rb\") as pdf_file:\n", + " pdf_bytes = pdf_file.read()\n", + " \n", + "pdf_stream = io.BytesIO(pdf_bytes)\n", + "\n", + "\n", + "if isinstance(pdf_stream, io.BytesIO):\n", + " images = convert_pdf_stream_to_images(pdf_stream, dpi=300)\n", + "else:\n", + " images = convert_from_path(pdf_stream, dpi=300)\n", + "\n", + "num_pages = len(images)\n", + "print(f\"PDF has {num_pages} pages\")\n", + "\n", + "class GameState(TypedDict):\n", + " project_json: dict\n", + " description: str\n", + " project_id: str\n", + " project_image: str\n", + " pseudo_code: dict\n", + " temp_pseudo_code: list\n", + " action_plan: Optional[Dict]\n", + " temporary_node: Optional[Dict]\n", + " page_count: int\n", + " processing: bool\n", + " \n", + "# Node 7: For multiple pages\n", + "def processed_page_node(state: GameState):\n", + " image = state.get(\"project_image\", \") \n", + " cnt =state[\"page_count\"]\n", + " print(f\"the page processed for page {cnt}\")\n", + " if cnt then` or `(((x position)) * (1))`.\n", + " - **Boolean blocks** in conditions must be inside `< >`, including nested ones: `>`, `< and >`,`< or >`.\n", + " - **Other Boolean blocks** in conditions must be inside `< >`, including nested ones or values or variables: `<(block/value/variable) * (block/value/variable)>`,`<(block/value/variable) < (block/value/variable)>`, and example of another variable`<[apple v] contains [a v]?>`.\n", + " - **Operator expressions** must use explicit Scratch operator blocks, e.g.:\n", + " ```\n", + " (([ballSpeed v]) * (1.1))\n", + " ```\n", + " - **Every hat block script must end** with a final `end` on its own line.\n", + " \n", + " 3. **Pseudo‑code formatting**:\n", + " - Represent each block or nested block on its own line.\n", + " - **Indent nested blocks by 4 spaces under their parent (`forever`, `if`, etc.).This is a critical requirement.**\n", + " - No comments or explanatory text—just the block sequence.\n", + " - a natural language breakdown of each step taken after the event, formatted as a multi-line string representing pseudo-code. Ensure clarity and granularity—each described action should map closely to a Scratch block or tight sequence. \n", + "\n", + " 4. **Logic content**:\n", + " - Build clear flow for mechanics (movement, jumping, flying, scoring, collisions).\n", + " - Match each action closely to a Scratch block or tight sequence.\n", + " - Do **NOT** include any justification or comments—only the raw logic.\n", + "\n", + " 5. 
**Examples for reference**: \n", + " **Correct** pattern for a simple start script:\n", + " ```\n", + " when green flag clicked\n", + " switch backdrop to [blue sky v]\n", + " set [score v] to (0)\n", + " show variable [score v]\n", + " broadcast [Game Start v]\n", + " end\n", + " ```\n", + " **Correct** pattern for updating the high score variable handling:\n", + " ```\n", + " when I receive [Game Over v]\n", + " if <((score)) > (([High Score v]))> then\n", + " set [High Score v] to ([score v])\n", + " end\n", + " switch backdrop to [Game Over v]\n", + " end\n", + " ```\n", + " **Correct** pattern for level up and increase difficulty use:\n", + " ```\n", + " when I receive [Level Up v]\n", + " change [level v] by (1)\n", + " set [ballSpeed v] to ((([ballSpeed v]) * (1.1)))\n", + " end\n", + " ```\n", + " **Correct** pattern for jumping mechanics use:\n", + " ```\n", + " when [space v] key pressed\n", + " if <((y position)) = (-100)> then\n", + " repeat (5)\n", + " change y by (100)\n", + " wait (0.1) seconds\n", + " change y by (-100)\n", + " wait (0.1) seconds\n", + " end\n", + " end\n", + " end\n", + " ```\n", + " **Correct** pattern for continuos moving objects use:\n", + " ```\n", + " when green flag clicked\n", + " go to x: (240) y: (-100)\n", + " set [speed v] to (-5)\n", + " show variable [speed v]\n", + " forever\n", + " change x by ([speed v])\n", + " if <((x position)) < (-240)> then\n", + " go to x: (240) y: (-100) \n", + " end\n", + " end\n", + " end\n", + " ```\n", + " 6. **Donot** add any explaination of logic or comments to justify or explain just put the logic content in the json.\n", + " 7. **Output**: \n", + " Return **only** a JSON object, using double quotes everywhere:\n", + " ```json\n", + " {{\n", + " \"refined_logic\":{{\n", + " \"name_variable\": 'Value of \"Sript for: \"',\n", + " \"pseudocode\":\"…your fully‑formatted pseudo‑code here…\",\n", + " }}\n", + " }}\n", + " ```\n", + " \"\"\n", + " image_input = {\n", + " \"type\": \"image_url\",\n", + " \"image_url\": {\n", + " # \"url\": f\"data:image/png;base64,{image}\"\n", + " \"url\": clean_base64_for_model(image[cnt])\n", + " }\n", + " }\n", + "\n", + " content = [\n", + " {\"type\": \"text\", \"text\": refinement_prompt},\n", + " image_input\n", + " ]\n", + " try:\n", + " #Invoke the main agent for logic refinement and relationship identification\n", + " response = agent.invoke({\"messages\": [{\"role\": \"user\", \"content\": content}]})\n", + " llm_output_raw = response[\"messages\"][-1].content.strip()\n", + " #print(f\"llm_output_raw: {response}\")\n", + " parsed_llm_output = extract_json_from_llm_response(llm_output_raw) \n", + " result = parsed_llm_output \n", + " print(f\"result:\\n\\n {result}\")\n", + " \n", + " except json.JSONDecodeError as error_json:\n", + " # If JSON parsing fails, use the json resolver agent\n", + " correction_prompt = (\n", + " \"Your task is to correct the provided JSON string to ensure it is **syntactically perfect and adheres strictly to JSON rules**.\\n\"\n", + " \"It must be a JSON object with `refined_logic` (string) and `block_relationships` (array of objects).\\n\"\n", + " f\"- **Error Details**: {error_json}\\n\\n\"\n", + " \"**Strict Instructions for your response:**\\n\"\n", + " \"1. **ONLY** output the corrected JSON. Do not include any other text or explanations.\\n\"\n", + " \"2. Ensure all keys and string values are enclosed in **double quotes**. Escape internal quotes (`\\\\`).\\n\"\n", + " \"3. No trailing commas. 
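# --- Editor's sketch (assumption) -----------------------------------------
# `clean_base64_for_model` is used above to build the image_url payload but is
# not defined in this section. Judging from the BadRequestError recorded
# earlier ("image_url.url : value must be a string"), it most plausibly
# normalises a base64 payload into a proper data-URL string:
def clean_base64_for_model(image_b64: str) -> str:
    """Return a data-URL string suitable for an image_url chat content part."""
    image_b64 = image_b64.strip()
    if image_b64.startswith("data:image"):
        return image_b64  # already a data URL
    return f"data:image/png;base64,{image_b64}"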
Correct nesting.\\n\\n\"\n", + " \"Here is the problematic JSON string to correct:\\n\"\n", + " f\"```json\\n{llm_output_raw}\\n```\\n\"\n", + " \"Corrected JSON:\\n\"\n", + " )\n", + " try:\n", + " correction_response = agent_json_resolver.invoke({\"messages\": [{\"role\": \"user\", \"content\": correction_prompt}]})\n", + " corrected_output = extract_json_from_llm_response(correction_response['messages'][-1].content)\n", + " #block_relationships = corrected_output.get(\"block_relationships\", [])\n", + " result = corrected_output \n", + " print(f\"result:\\n\\n {result}\") \n", + " #print(\"content----------->\",content) \n", + " except Exception as e_corr:\n", + " logger.error(f\"Failed to correct JSON output for even after retry: {e_corr}\") \n", + " \n", + " # Update the original action_plan in the state with the refined version\n", + " state[\"pseudo_code\"] = result\n", + " state[\"temp_pseudo_code\"] += [result]\n", + " with open(\"debug_state.json\", \"w\", encoding=\"utf-8\") as f:\n", + " json.dump(state, f, indent=2, ensure_ascii=False)\n", + " logger.info(\"Plan refinement and block relation analysis completed for all plans.\")\n", + " return state\n", + "\n", + "# Node 2: planner node\n", + "def overall_planner_node(state: GameState):\n", + " \"\"\n", + " Generates a comprehensive action plan for sprites, including detailed Scratch block information.\n", + " This node acts as an overall planner, leveraging knowledge of all block shapes and categories.\n", + " \"\"\n", + " logger.info(\"--- Running OverallPlannerNode ---\")\n", + "\n", + " project_json = state[\"project_json\"]\n", + " raw = state.get(\"pseudo_code\", {})\n", + " refined_logic_data = raw.get(\"refined_logic\", {})\n", + " sprite_name = refined_logic_data.get(\"name_variable\", \"\")\n", + " pseudo = refined_logic_data.get(\"pseudocode\", \")\n", + "\n", + " # MODIFICATION 1: Include 'Stage' in the list of names to plan for.\n", + " # It's crucial to ensure 'Stage' is always present for its global role.\n", + " target_names = [t[\"name\"] for t in project_json[\"targets\"]]\n", + "\n", + " # MODIFICATION 2: Get sprite positions, providing default for Stage as it doesn't have x,y\n", + " # sprite_positions = {}\n", + " # for target in project_json[\"targets\"]:\n", + " # if not target[\"isStage\"]:\n", + " # sprite_positions[target[\"name\"]] = {\"x\": target.get(\"x\", 0), \"y\": target.get(\"y\", 0)}\n", + " # else:\n", + " # sprite_positions[target[\"name\"]] = {\"x\": \"N/A\", \"y\": \"N/A\"} # Stage doesn't have positional coordinates\n", + "\n", + " # declaration_plan = state[\"declaration_plan\"]\n", + "\n", + " planning_prompt = f\"\"\n", + "Generate a detailed action plan for the game's sprites and stage based on the given pseudo-code and sprite details for the given sprite name and .\n", + "\n", + "Description: \n", + " **Sprite_name**: {sprite_name} \n", + " **and its corresponding Pseudo_code:** \n", + " '{pseudo}'\n", + "\n", + "[Note: Make sure you just refine the pseudo code by correting mistake and adding the missing opcode if any and *Do not* generate any new logic]\n", + "----\n", + "**Targets in Game (Sprites and Stage) available in project_json:** {', '.join(target_names)}\n", + "\n", + "--- Scratch 3.0 Block Reference ---\n", + "This section provides a comprehensive reference of Scratch 3.0 blocks, categorized by shape, including their opcodes and functional descriptions. 
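# --- Editor's sketch (assumption) -----------------------------------------
# `extract_json_from_llm_response` is called throughout these nodes but is not
# defined in this section. The callers catch json.JSONDecodeError, so a minimal
# helper that strips code fences and parses the outermost JSON object fits:
import json
import re

def extract_json_from_llm_response(text: str) -> dict:
    text = text.strip()
    fenced = re.search(r"```(?:json)?\s*(.*?)```", text, re.DOTALL)
    if fenced:
        text = fenced.group(1).strip()  # drop ```json ... ``` wrappers
    start, end = text.find("{"), text.rfind("}")
    if start != -1 and end != -1:
        text = text[start:end + 1]      # keep only the outermost object
    return json.loads(text)             # raises json.JSONDecodeError on failure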
Use this to accurately identify block types and behavior.\n", + "\n", + "### Hat Blocks\n", + "Description: {hat_description}\n", + "Blocks:\n", + "{hat_opcodes_functionalities}\n", + "\n", + "### Boolean Blocks\n", + "Description: {boolean_description}\n", + "Blocks:\n", + "{boolean_opcodes_functionalities}\n", + "\n", + "### C Blocks\n", + "Description: {c_description}\n", + "Blocks:\n", + "{c_opcodes_functionalities}\n", + "\n", + "### Cap Blocks\n", + "Description: {cap_description}\n", + "Blocks:\n", + "{cap_opcodes_functionalities}\n", + "\n", + "### Reporter Blocks\n", + "Description: {reporter_description}\n", + "Blocks:\n", + "{reporter_opcodes_functionalities}\n", + "\n", + "### Stack Blocks\n", + "Description: {stack_description}\n", + "Blocks:\n", + "{stack_opcodes_functionalities}\n", + "\n", + "-----------------------------------\n", + "\n", + "Your task is to use the `Sprite_name` given and `Pseudo_code` and add it to the specific target name and define the primary actions and movements.\n", + "The output should be a JSON object with a single key 'action_overall_flow'. Each key inside this object should be a sprite or 'Stage' name (e.g., 'Player', 'Enemy', 'Stage'), and its value must include a 'description' and a list of 'plans'.\n", + "Each plan must include a **single Scratch Hat Block** (e.g., 'event_whenflagclicked') to start scratch project and should contain:\n", + "1. **'event'**: the exact `opcode` of the hat block that initiates the logic.\n", + "[NOTE: INSTRUCTIONN TO FOLLOW IF PSEUDO_CODE HAVING PROBLEM ]\n", + "2. **'logic'**: a natural language breakdown of each step taken after the event, formatted as a multi-line string representing pseudo-code. Ensure clarity and granularity—each described action should map closely to a Scratch block or tight sequence.\n", + " - Use 'forever: ...' or 'repeat(10): ...' to prefix repeating logic suitable taking reference from the C blocks.\n", + " - Use Scratch-consistent verbs: 'move', 'change', 'wait', 'hide', 'show', 'say', 'glide', etc.\n", + " - **Numeric values** `(e.g., 0, 5, 0.2, -130)` **must** be in parentheses: `(0)`, `(5)`, `(0.2)`, `(-130)`.\n", + " - **AlphaNumeric values** `(e.g., hello, say 5, 4, hi!)` **must** be in parentheses: `(hello)`, `(say 5)`, `(4)`, `(hi!)`.\n", + " - **Variables** must be in the form `[variable v]` (e.g., `[score v]`), even when used inside expressions two example use `set [score v] to (1)` or `show variable ([speed v])`.\n", + " - **Dropdown options** must be in the form `[option v]` (e.g., `[Game Start v]`, `[blue sky v]`). example use `when [space v] key pressed`.\n", + " - **Reporter blocks** used as inputs must be double‑wrapped: `((x position))`, `((y position))`. example use `if <((y position)) = (-130)> then` or `(((x position)) * (1))`.\n", + " - **Boolean blocks** in conditions must be inside `< >`, including nested ones: `>`, `< and >`,`< or >`.\n", + " - **Other Boolean blocks** in conditions must be inside `< >`, including nested ones or values or variables: `<(block/value/variable) * (block/value/variable)>`,`<(block/value/variable) < (block/value/variable)>`, and example of another variable`<[apple v] contains [a v]?>`.\n", + " - **Operator expressions** must use explicit Scratch operator blocks, e.g.:\n", + " ```\n", + " (([ballSpeed v]) * (1.1))\n", + " ```\n", + " - **Every hat block script must end** with a final `end` on its own line.\n", + " - **Indent nested blocks by 4 spaces under their parent (`forever`, `if`, etc.).This is a critical requirement.**\n", + "3. 
**Opcode Lists**: include relevant Scratch opcodes grouped under `motion`, `control`, `operator`, `sensing`, `looks`, `sounds`, `events`, and `data`. List only the non-empty categories. Use exact opcodes.\n", + "4. Few Example of content of logics inside for a specific plan as scratch pseudo-code:\n", + " - example 1[continues moving objects]:\n", + " ```\n", + " when green flag clicked\n", + " go to x: (240) y: (-100)\n", + " set [speed v] to (-5)\n", + " show variable [speed v]\n", + " forever\n", + " change x by ([speed v])\n", + " if <((x position)) < (-240)> then \n", + " go to x: (240) y: (-100) \n", + " end\n", + " end\n", + " end\n", + " ```\n", + " - example 2[jumping script of an plan]: \n", + " ``` \n", + " when [space v] key pressed\n", + " if <((y position)) = (-100)> then \n", + " repeat (5)\n", + " change y by (100) \n", + " wait (0.1) seconds\n", + " change y by (-100) \n", + " wait (0.1) seconds\n", + " end\n", + " end\n", + " end\n", + " ```\n", + " - example 3 [pattern for level up and increase difficulty]:\n", + " ```\n", + " when I receive [Level Up v]\n", + " change [level v] by (1)\n", + " set [ballSpeed v] to ((([ballSpeed v]) * (1.1)))\n", + " end\n", + " ```\n", + "5. Use target names exactly as listed in `Targets in Game`. Do NOT rename or invent new targets.\n", + "6. Ensure the plan reflects accurate opcode usage derived strictly from the block reference above.\n", + "7. Few shot Example structure for 'action_overall_flow':\n", + "```json\n", + "{{\n", + " \"action_overall_flow\": {{\n", + " \"Stage\": {{\n", + " \"description\": \"Background and global game state management, including broadcasts, rewards, and score.\",\n", + " \"plans\": [\n", + " {{\n", + " \"event\": \"event_whenflagclicked\",\n", + " \"logic\": \"when green flag clicked\\n switch backdrop to [backdrop1 v]\\n set [score v] to 0\\n show variable [score v]\\n broadcast [Game Start v]\\nend\",\n", + " \"motion\": [],\n", + " \"control\": [],\n", + " \"operator\": [],\n", + " \"sensing\": [],\n", + " \"looks\": [\n", + " \"looks_switchbackdropto\"\n", + " ],\n", + " \"sounds\": [],\n", + " \"events\": [\n", + " \"event_broadcast\"\n", + " ],\n", + " \"data\": [\n", + " \"data_setvariableto\",\n", + " \"data_showvariable\"\n", + " ]\n", + " }},\n", + " {{\n", + " \"event\": \"event_whenbroadcastreceived\",\n", + " \"logic\": \"when I receive [Game Over v]\\n if <(score) > (High Score)> then\\n set [High Score v] to (score)\\n end\\n switch backdrop to [HighScore v]\\nend\",\n", + " \"motion\": [],\n", + " \"control\": [\n", + " \"control_if\"\n", + " ],\n", + " \"operator\": [\n", + " \"operator_gt\"\n", + " ],\n", + " \"sensing\": [],\n", + " \"looks\": [\n", + " \"looks_switchbackdropto\"\n", + " ],\n", + " \"sounds\": [],\n", + " \"events\": [],\n", + " \"data\": [\n", + " \"data_setvariableto\"\n", + " ]\n", + " }}\n", + " ]\n", + " }},\n", + " \"Sprite1\": {{\n", + " \"description\": \"Main character (cat) actions\",\n", + " \"plans\": [\n", + " {{\n", + " \"event\": \"event_whenflagclicked\",\n", + " \"logic\": \"when green flag clicked\\n go to x: 240 y: -100\\nend\",\n", + " \"motion\": [\n", + " \"motion_gotoxy\"\n", + " ],\n", + " \"control\": [],\n", + " \"operator\": [],\n", + " \"sensing\": [],\n", + " \"looks\": [],\n", + " \"sounds\": [],\n", + " \"events\": [],\n", + " \"data\": []\n", + " }},\n", + " {{\n", + " \"event\": \"event_whenkeypressed\",\n", + " \"logic\": \"when [space v] key pressed\\n repeat (10)\\n change y by (20)\\n wait (0.1) seconds\\n change y by (-20)\\n 
end\\nend\",\n", + " \"motion\": [\n", + " \"motion_changeyby\"\n", + " ],\n", + " \"control\": [\n", + " \"control_repeat\",\n", + " \"control_wait\"\n", + " ],\n", + " \"operator\": [],\n", + " \"sensing\": [],\n", + " \"looks\": [],\n", + " \"sounds\": [],\n", + " \"events\": [],\n", + " \"data\": []\n", + " }}\n", + " ]\n", + " }},\n", + " \"soccer ball\": {{\n", + " \"description\": \"Obstacle movement and interaction\",\n", + " \"plans\": [\n", + " {{\n", + " \"event\": \"event_whenflagclicked\",\n", + " \"logic\": \"when green flag clicked\\n go to x: 240 y: -135\\n forever\\n glide 2 seconds to x: -240 y: -135\\n if <(x position) < -235> then\\n set x to 240\\n end\\n if then\\n broadcast [Game Over v]\\n stop [all v]\\n end\\n end\\nend\",\n", + " \"motion\": [\n", + " \"motion_gotoxy\",\n", + " \"motion_glidesecstoxy\",\n", + " \"motion_xposition\",\n", + " \"motion_setx\"\n", + " ],\n", + " \"control\": [\n", + " \"control_forever\",\n", + " \"control_if\",\n", + " \"control_stop\"\n", + " ],\n", + " \"operator\": [\n", + " \"operator_lt\"\n", + " ],\n", + " \"sensing\": [\n", + " \"sensing_istouching\",\n", + " \"sensing_touchingobjectmenu\"\n", + " ],\n", + " \"looks\": [],\n", + " \"sounds\": [],\n", + " \"events\": [\n", + " \"event_broadcast\"\n", + " ],\n", + " \"data\": []\n", + " }}\n", + " ]\n", + " }}\n", + "\n", + " }}\n", + "}}\n", + "```\n", + "8. Based on the provided context, generate the `action_overall_flow`.\n", + " - Maintain the **exact JSON structure** shown above.\n", + " - All `logic` fields must be **clear and granular**.\n", + " - Only include opcode categories that contain relevant opcodes.\n", + " - Ensure that each opcode matches its intended Scratch functionality.\n", + " - If feedback suggests major change, **rethink the entire plan** for the affected sprite(s).\n", + " - If feedback is minor, make precise, minimal improvements only.\n", + "\"\"\n", + "\n", + " try:\n", + " response = agent.invoke({\"messages\": [{\"role\": \"user\", \"content\": planning_prompt}]})\n", + " print(\"Raw response from LLM [OverallPlannerNode 1]:\",response)\n", + " raw_response = response[\"messages\"][-1].content#strip_noise(response[\"messages\"][-1].content)\n", + " print(\"Raw response from LLM [OverallPlannerNode 2]:\", raw_response) # Uncomment for debugging\n", + " # json debugging and solving\n", + " try:\n", + " overall_plan = extract_json_from_llm_response(raw_response)\n", + " except json.JSONDecodeError as error_json:\n", + " logger.error(\"Failed to extract JSON from LLM response. Attempting to correct the response.\")\n", + " # Use the JSON resolver agent to fix the response\n", + " correction_prompt = (\n", + " \"Your task is to correct the provided JSON string to ensure it is **syntactically perfect and adheres strictly to JSON rules**.\\n\"\n", + " \"Carefully review the JSON for any errors, especially focusing on the reported error at:\\n\"\n", + " f\"- **Error Details**: {error_json}\\n\\n\"\n", + " \"**Strict Instructions for your response:**\\n\"\n", + " \"1. **ONLY** output the corrected JSON. Do not include any other text, comments, or explanations outside the JSON.\\n\"\n", + " \"2. Ensure all property names (keys) are enclosed in **double quotes**.\\n\"\n", + " \"3. Ensure string values are correctly enclosed in **double quotes** and any internal special characters (like newlines `\\\\n`, tabs `\\\\t`, backslashes `\\\\\\\\`, or double quotes `\\\\`) are properly **escaped**.\\n\"\n", + " \"4. 
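# --- Editor's sketch (assumption) -----------------------------------------
# `agent` and `agent_json_resolver` are invoked by every node in this flow but
# are created elsewhere in the notebook. Assuming they are plain, tool-less
# ReAct agents over the same `llm`, they could be constructed as:
from langgraph.prebuilt import create_react_agent

agent = create_react_agent(llm, tools=[])                # planning / refinement agent
agent_json_resolver = create_react_agent(llm, tools=[])  # used only to repair malformed JSON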
Verify that there are **no extra commas**, especially between key-value pairs or after the last element in an object or array.\\n\"\n", + " \"5. Ensure proper nesting and matching of curly braces `{}` and square brackets `[]`.\\n\"\n", + " \"6. **Crucially, remove any extraneous characters or duplicate closing braces outside the main JSON object.**\\n\"\n", + " \"7. The corrected JSON must be a **complete and valid** JSON object.\\n\\n\"\n", + " \"Here is the problematic JSON string to correct:\\n\"\n", + " \"```json\\n\"\n", + " f\"{raw_response}\\n\"\n", + " \"```\\n\"\n", + " \"Corrected JSON:\\n\"\n", + " )\n", + " correction_response = agent_json_resolver.invoke({\"messages\": [{\"role\": \"user\", \"content\": correction_prompt}]})\n", + " print(f\"[JSON CORRECTOR RESPONSE AT OVERALLPLANNERNODE ]: {correction_response['messages'][-1].content}\")\n", + " overall_plan= extract_json_from_llm_response(correction_response['messages'][-1].content)#strip_noise(correction_response[\"messages\"][-1].content))\n", + " \n", + " state[\"action_plan\"] = overall_plan\n", + " logger.info(\"Overall plan generated by OverallPlannerNode.\")\n", + " \n", + " # with open(\"debug_state.json\", \"w\", encoding=\"utf-8\") as f:\n", + " # json.dump(state, f, indent=2, ensure_ascii=False)\n", + " \n", + " return state\n", + " \n", + " except Exception as e:\n", + " logger.error(f\"Error in OverallPlannerNode: {e}\")\n", + " raise\n", + "\n", + "# Node 3: refiner node\n", + "def refined_planner_node(state: GameState):\n", + " \"\"\n", + " Refines the action plan based on validation feedback and game description.\n", + " \"\"\n", + " logger.info(\"--- Running RefinedPlannerNode ---\")\n", + " raw = state.get(\"pseudo_code\", {})\n", + " refined_logic_data = raw.get(\"refined_logic\", {})\n", + " sprite_name = refined_logic_data.get(\"name_variable\", \"\")\n", + " pseudo = refined_logic_data.get(\"pseudocode\", \")\n", + " #detailed_game_description = state.get(\"detailed_game_description\", state.get(\"description\", \"A game.\"))\n", + " current_action_plan = state.get(\"action_plan\", {})\n", + " print(f\"[current_action_plan before refinement] on ({state.get('iteration_count', 0)}): {json.dumps(current_action_plan, indent=2)}\")\n", + " plan_validation_feedback = state.get(\"plan_validation_feedback\", \"No specific feedback provided. 
Assume general refinement is needed.\")\n", + " project_json = state[\"project_json\"]\n", + " target_names = [t[\"name\"] for t in project_json[\"targets\"]]\n", + "\n", + " # MODIFICATION 2: Get sprite positions, providing default for Stage as it doesn't have x,y\n", + " sprite_positions = {}\n", + " for target in project_json[\"targets\"]:\n", + " if not target[\"isStage\"]:\n", + " sprite_positions[target[\"name\"]] = {\"x\": target.get(\"x\", 0), \"y\": target.get(\"y\", 0)}\n", + " else:\n", + " sprite_positions[target[\"name\"]] = {\"x\": \"N/A\", \"y\": \"N/A\"} # Stage doesn't have positional coordinates\n", + "\n", + " #declaration_plan = state[\"declaration_plan\"]\n", + "\n", + " refinement_prompt = f\"\"\n", + "Refine and correct the JSON object `action_overall_flow` so that it fully aligns with the detailed game description, sprite positions, variable/broadcast declarations, and Scratch 3.0 block reference—while also validating for common formatting or opcode errors.\n", + "\n", + "Here is the overall script available:\n", + "**Sprite name**: {sprite_name} \n", + "**and its corresponding Pseudo_code:** \n", + "'{pseudo}'\n", + "\n", + "[Note: Make sure you just refine the pseudo code by correting mistake and adding the missing opcode if any and *Do not* generate any new logic]\n", + "---\n", + "**Targets in Game (Sprites and Stage):** {', '.join(target_names)} \n", + "**Current action plan:** \n", + "{current_action_plan}\n", + "\n", + "**Validation Feedback:** \n", + "'{plan_validation_feedback}'\n", + "\n", + "--- Scratch 3.0 Block Reference ---\n", + "### Hat Blocks\n", + "{hat_opcodes_functionalities}\n", + "\n", + "### Boolean Blocks\n", + "{boolean_opcodes_functionalities}\n", + "\n", + "### C Blocks\n", + "{c_opcodes_functionalities}\n", + "\n", + "### Cap Blocks\n", + "{cap_opcodes_functionalities}\n", + "\n", + "### Reporter Blocks\n", + "{reporter_opcodes_functionalities}\n", + "\n", + "### Stack Blocks\n", + "{stack_opcodes_functionalities}\n", + "\n", + "-----------------------------------\n", + "\n", + "* **Your task is to align to description, refine and correct the JSON object 'action_overall_flow'.**\n", + "Use sprite names exactly as provided in `sprite_names` (e.g., 'Sprite1', 'soccer ball'); and also the stage, do **NOT** rename them.\n", + "1. **'event'**: the exact `opcode` of the hat block that initiates the logic.\n", + "2. **'logic'**: a natural language breakdown of each step taken after the event, formatted as a multi-line string representing pseudo-code. Ensure clarity and granularity—each described action should map closely to a Scratch block or tight sequence.\n", + " - Do **NOT** include any justification or comments—only the raw logic.\n", + " - Use 'forever: ...' or 'repeat(10): ...' to prefix repeating logic suitable taking reference from the C blocks.\n", + " - Use Scratch-consistent verbs: 'move', 'change', 'wait', 'hide', 'show', 'say', 'glide', etc.\n", + " - **Numeric values** `(e.g., 0, 5, 0.2, -130)` **must** be in parentheses: `(0)`, `(5)`, `(0.2)`, `(-130)`.\n", + " - **AlphaNumeric values** `(e.g., hello, say 5, 4, hi!)` **must** be in parentheses: `(hello)`, `(say 5)`, `(4)`, `(hi!)`.\n", + " - **Variables** must be in the form `[variable v]` (e.g., `[score v]`), even when used inside expressions two example use `set [score v] to (1)` or `show variable ([speed v])`.\n", + " - **Dropdown options** must be in the form `[option v]` (e.g., `[Game Start v]`, `[blue sky v]`). 
example use `when [space v] key pressed`.\n", + " - **Reporter blocks** used as inputs must be double‑wrapped: `((x position))`, `((y position))`. example use `if <((y position)) = (-130)> then` or `(((x position)) * (1))`.\n", + " - **Boolean blocks** in conditions must be inside `< >`, including nested ones: `>`, `< and >`,`< or >`.\n", + " - **Other Boolean blocks** in conditions must be inside `< >`, including nested ones or values or variables: `<(block/value/variable) * (block/value/variable)>`,`<(block/value/variable) < (block/value/variable)>`, and example of another variable`<[apple v] contains [a v]?>`.\n", + " - **Operator expressions** must use explicit Scratch operator blocks, e.g.:\n", + " ```\n", + " (([ballSpeed v]) * (1.1))\n", + " ```\n", + " - **Every hat block script must end** with a final `end` on its own line.\n", + "3. **Validation & Formatting Checks** \n", + " - **Opcode Coverage**: Ensure every action in `logic` has a matching opcode in the lists. \n", + " - **Bracket Nesting**: Confirm every `(` has a matching `)`, e.g., `(pick random (100) to (-100))`. \n", + " - **Operator Formatting**: Validate that operators use Scratch operator blocks, not inline math. \n", + " - **Common Errors**: \n", + " - `pick random (100,-100)` → `(pick random (100) to (-100))` \n", + " - Missing `end` at script conclusion \n", + " - Unwrapped reporter inputs or Boolean tests\n", + "4. **Opcode Lists**: include relevant Scratch opcodes grouped under `motion`, `control`, `operator`, `sensing`, `looks`, `sounds`, `events`, and `data`. List only the non-empty categories. Use exact opcodes from the given Scratch 3.0 Block Reference.\n", + "5. Few Example of content of logics inside for a specific plan as scratch pseudo-code:\n", + " - example 1[continues moving objects]:\n", + " ```\n", + " when green flag clicked\n", + " go to x: (240) y: (-100)\n", + " set [speed v] to (-5)\n", + " show variable [speed v]\n", + " forever\n", + " change x by ([speed v])\n", + " if <((x position)) < (-240)> then \n", + " go to x: (240) y: (-100) \n", + " end\n", + " end\n", + " end\n", + " ```\n", + " - example 2[jumping script of an plan]: \n", + " ``` \n", + " when [space v] key pressed\n", + " if <((y position)) = (-100)> then \n", + " repeat (5)\n", + " change y by (100) \n", + " wait (0.1) seconds\n", + " change y by (-100) \n", + " wait (0.1) seconds\n", + " end\n", + " end\n", + " end\n", + " ```\n", + " - example 3 [pattern for level up and increase difficulty]:\n", + " ```\n", + " when I receive [Level Up v]\n", + " change [level v] by (1)\n", + " set [ballSpeed v] to ((([ballSpeed v]) * (1.1)))\n", + " end\n", + " ```\n", + "6. Use target names exactly as listed in `Targets in Game`. Do NOT rename or invent new targets.\n", + "7. Ensure the plan reflects accurate opcode usage derived strictly from the block reference above.\n", + "8. 
Few shot Example structure for 'action_overall_flow':\n", + "```json\n", + " {{\n", + " \"action_overall_flow\": {{\n", + " \"Stage\": {{\n", + " \"description\": \"Background and global game state management, including broadcasts, rewards, and score.\",\n", + " \"plans\": [\n", + " {{\n", + " \"event\": \"event_whenflagclicked\",\n", + " \"logic\": \"when green flag clicked\\n switch backdrop to [backdrop1 v]\\n set [score v] to 0\\n show variable [score v]\\n broadcast [Game Start v]\\nend\",\n", + " \"motion\": [],\n", + " \"control\": [],\n", + " \"operator\": [],\n", + " \"sensing\": [],\n", + " \"looks\": [\n", + " \"looks_switchbackdropto\"\n", + " ],\n", + " \"sounds\": [],\n", + " \"events\": [\n", + " \"event_broadcast\"\n", + " ],\n", + " \"data\": [\n", + " \"data_setvariableto\",\n", + " \"data_showvariable\"\n", + " ]\n", + " }},\n", + " {{\n", + " \"event\": \"event_whenbroadcastreceived\",\n", + " \"logic\": \"when I receive [Game Over v]\\n if <(score) > (High Score)> then\\n set [High Score v] to (score)\\n end\\n switch backdrop to [HighScore v]\\nend\",\n", + " \"motion\": [],\n", + " \"control\": [\n", + " \"control_if\"\n", + " ],\n", + " \"operator\": [\n", + " \"operator_gt\"\n", + " ],\n", + " \"sensing\": [],\n", + " \"looks\": [\n", + " \"looks_switchbackdropto\"\n", + " ],\n", + " \"sounds\": [],\n", + " \"events\": [],\n", + " \"data\": [\n", + " \"data_setvariableto\"\n", + " ]\n", + " }}\n", + " ]\n", + " }},\n", + " \"Sprite1\": {{\n", + " \"description\": \"Main character (cat) actions\",\n", + " \"plans\": [\n", + " {{\n", + " \"event\": \"event_whenflagclicked\",\n", + " \"logic\": \"when green flag clicked\\n go to x: 240 y: -100\\nend\\n\",\n", + " \"motion\": [\n", + " \"motion_gotoxy\"\n", + " ],\n", + " \"control\": [],\n", + " \"operator\": [],\n", + " \"sensing\": [],\n", + " \"looks\": [],\n", + " \"sounds\": [],\n", + " \"events\": [],\n", + " \"data\": []\n", + " }},\n", + " {{\n", + " \"event\": \"event_whenkeypressed\",\n", + " \"logic\": \"when [space v] key pressed\\n repeat (10)\\n change y by (20)\\n wait (0.1) seconds\\n change y by (-20)\\n end\\nend\",\n", + " \"motion\": [\n", + " \"motion_changeyby\"\n", + " ],\n", + " \"control\": [\n", + " \"control_repeat\",\n", + " \"control_wait\"\n", + " ],\n", + " \"operator\": [],\n", + " \"sensing\": [],\n", + " \"looks\": [],\n", + " \"sounds\": [],\n", + " \"events\": [],\n", + " \"data\": []\n", + " }}\n", + " ]\n", + " }},\n", + " \"soccer ball\": {{\n", + " \"description\": \"Obstacle movement and interaction\",\n", + " \"plans\": [\n", + " {{\n", + " \"event\": \"event_whenflagclicked\",\n", + " \"logic\": \"when green flag clicked\\n go to x: 240 y: -135\\n forever\\n glide 2 seconds to x: -240 y: -135\\n if <(x position) < -235> then\\n set x to 240\\n end\\n if then\\n broadcast [Game Over v]\\n stop [all v]\\n end\\n end\\nend\",\n", + " \"motion\": [\n", + " \"motion_gotoxy\",\n", + " \"motion_glidesecstoxy\",\n", + " \"motion_xposition\",\n", + " \"motion_setx\"\n", + " ],\n", + " \"control\": [\n", + " \"control_forever\",\n", + " \"control_if\",\n", + " \"control_stop\"\n", + " ],\n", + " \"operator\": [\n", + " \"operator_lt\"\n", + " ],\n", + " \"sensing\": [\n", + " \"sensing_istouching\",\n", + " \"sensing_touchingobjectmenu\"\n", + " ],\n", + " \"looks\": [],\n", + " \"sounds\": [],\n", + " \"events\": [\n", + " \"event_broadcast\"\n", + " ],\n", + " \"data\": []\n", + " }}\n", + " ]\n", + " }}\n", + " }}\n", + "}}\n", + "```\n", + "9. 
Use the validation feedback to address errors, fill in missing logic, or enhance clarity.\n", + " example of few possible improvements: 1.event_whenflagclicked is used to control sprite but its used for actual start scratch project and reset scratch. 2. looping like forever used where we should use iterative. 3. missing of for variable we used in the block\n", + " - Maintain the **exact JSON structure** shown above.\n", + " - All `logic` fields must be **clear and granular**.\n", + " - Only include opcode categories that contain relevant opcodes.\n", + " - Ensure that each opcode matches its intended Scratch functionality.\n", + " - If feedback suggests major change, **rethink the entire plan** for the affected sprite(s).\n", + " - If feedback is minor, make precise, minimal improvements only.\n", + "\"\"\n", + " try:\n", + " response = agent.invoke({\"messages\": [{\"role\": \"user\", \"content\": refinement_prompt}]})\n", + " raw_response = response[\"messages\"][-1].content#strip_noise(response[\"messages\"][-1].content)\n", + " logger.info(f\"Raw response from LLM [RefinedPlannerNode]: {raw_response[:500]}...\")\n", + " # json debugging and solving\n", + " try:\n", + " refined_plan = extract_json_from_llm_response(raw_response)\n", + " except json.JSONDecodeError as error_json:\n", + " logger.error(\"Failed to extract JSON from LLM response. Attempting to correct the response.\")\n", + " # Use the JSON resolver agent to fix the response\n", + " correction_prompt = (\n", + " \"Your task is to correct the provided JSON string to ensure it is **syntactically perfect and adheres strictly to JSON rules**.\\n\"\n", + " \"Carefully review the JSON for any errors, especially focusing on the reported error at:\\n\"\n", + " f\"- **Error Details**: {error_json}\\n\\n\"\n", + " \"**Strict Instructions for your response:**\\n\"\n", + " \"1. **ONLY** output the corrected JSON. Do not include any other text, comments, or explanations outside the JSON.\\n\"\n", + " \"2. Ensure all property names (keys) are enclosed in **double quotes**.\\n\"\n", + " \"3. Ensure string values are correctly enclosed in **double quotes** and any internal special characters (like newlines `\\\\n`, tabs `\\\\t`, backslashes `\\\\\\\\`, or double quotes `\\\\`) are properly **escaped**.\\n\"\n", + " \"4. IN `logic` field make sure content enclosed in **double quotes** should not have invalid **double quotes**, **eliminate** all quotes inside the content if any. \"\n", + " \"4. Verify that there are **no extra commas**, especially between key-value pairs or after the last element in an object or array.\\n\"\n", + " \"5. Ensure proper nesting and matching of curly braces `{}` and square brackets `[]`.\\n\"\n", + " \"6. **Crucially, remove any extraneous characters or duplicate closing braces outside the main JSON object.**\\n\" # Added instruction\n", + " \"7. 
The corrected JSON must be a **complete and valid** JSON object.\\n\\n\"\n", + " \"Here is the problematic JSON string to correct:\\n\"\n", + " \"```json\\n\"\n", + " f\"{raw_response}\\n\"\n", + " \"```\\n\"\n", + " \"Corrected JSON:\\n\"\n", + " )\n", + " correction_response = agent_json_resolver.invoke({\"messages\": [{\"role\": \"user\", \"content\": correction_prompt}]})\n", + " print(f\"[JSON CORRECTOR RESPONSE AT REFINEPLANNER ]: {correction_response['messages'][-1].content}\")\n", + " refined_plan = extract_json_from_llm_response(correction_response[\"messages\"][-1].content)#strip_noise(correction_response[\"messages\"][-1].content))\n", + " logger.info(\"Refined plan corrected by JSON resolver agent.\")\n", + " \n", + " if refined_plan:\n", + " #state[\"action_plan\"] = refined_plan.get(\"action_overall_flow\", {}) # Update to the key 'action_overall_flow' [error]\n", + " state[\"action_plan\"] = refined_plan #.get(\"action_overall_flow\", {}) # Update the main the prompt includes updated only\n", + " logger.info(\"Action plan refined by RefinedPlannerNode.\")\n", + " else:\n", + " logger.warning(\"RefinedPlannerNode did not return a valid 'action_overall_flow' structure. Keeping previous plan.\")\n", + " print(\"[Refined Action Plan]:\", json.dumps(state[\"action_plan\"], indent=2))\n", + " #print(\"[current state after refinement]:\", json.dumps(state, indent=2))\n", + " \n", + " # with open(\"debug_state.json\", \"w\", encoding=\"utf-8\") as f:\n", + " # json.dump(state, f, indent=2, ensure_ascii=False)\n", + " \n", + " return state\n", + " except Exception as e:\n", + " logger.error(f\"Error in RefinedPlannerNode: {e}\")\n", + " raise\n", + "\n", + "# Node 4: opcode counter node\n", + "def plan_opcode_counter_node(state: Dict[str, Any]) -> Dict[str, Any]:\n", + " \"\"\n", + " For each plan in state[\"action_plan\"], calls the LLM agent\n", + " to analyze the `logic` string and return a list of {opcode, count} for each category.\n", + " \"\"\n", + " logger.info(\"=== Running OPCODE COUTER LOGIC with LLM counts ===\")\n", + " #game_description = state.get(\"description\", \"No game description provided.\")\n", + " sprite_name = {}\n", + " project_json_targets = state.get(\"project_json\", {}).get(\"targets\", [])\n", + " for target in project_json_targets:\n", + " sprite_name[target[\"name\"]] = target[\"name\"]\n", + " \n", + " action_flow = state.get(\"action_plan\", {}) \n", + " \n", + " if action_flow.get(\"action_overall_flow\", {}) == {}:\n", + " plan_data = action_flow.items()\n", + " else:\n", + " plan_data = action_flow.get(\"action_overall_flow\", {}).items()\n", + " \n", + " refined_flow: Dict[str, Any] = {}\n", + " for sprite, sprite_data in plan_data:\n", + " refined_plans = []\n", + " for plan in sprite_data.get(\"plans\", []):\n", + " logic = plan.get(\"logic\", \")\n", + " event = plan.get(\"event\", \")\n", + " \n", + " # These are for guiding the LLM, not for the final output format directly\n", + " opcodes_from_plan = {\n", + " \"motion\": plan.get(\"motion\", []),\n", + " \"control\": plan.get(\"control\", []),\n", + " \"operator\": plan.get(\"operator\", []),\n", + " \"sensing\": plan.get(\"sensing\", []),\n", + " \"looks\": plan.get(\"looks\", []),\n", + " \"sounds\": plan.get(\"sounds\", []),\n", + " \"events\": plan.get(\"events\", []) + ([event] if isinstance(event, str) else []),\n", + " \"data\": plan.get(\"data\", []),\n", + " }\n", + "\n", + " refinement_prompt = f\"\"\n", + "You are a Scratch 3.0 expert with deep knowledge of block types, nesting 
and stack relationships.\n", + "Your job: read the plan logic below and decide exactly which blocks (and how many of each) are required to implement it.\n", + "Review the following plan for '{sprite}' triggered by '{event}'.\n", + "--- Scratch 3.0 Block Reference ---\n", + " ### Hat Blocks\n", + " Description: {hat_description}\n", + " Blocks:\n", + " {hat_opcodes_functionalities}\n", + "\n", + " ### Boolean Blocks\n", + " Description: {boolean_description}\n", + " Blocks:\n", + " {boolean_opcodes_functionalities}\n", + "\n", + " ### C Blocks\n", + " Description: {c_description}\n", + " Blocks:\n", + " {c_opcodes_functionalities}\n", + "\n", + " ### Cap Blocks\n", + " Description: {cap_description}\n", + " Blocks:\n", + " {cap_opcodes_functionalities}\n", + "\n", + " ### Reporter Blocks\n", + " Description: {reporter_description}\n", + " Blocks:\n", + " {reporter_opcodes_functionalities}\n", + "\n", + " ### Stack Blocks\n", + " Description: {stack_description}\n", + " Blocks:\n", + " {stack_opcodes_functionalities}\n", + "-----------------------------------\n", + "Current Plan Details:\n", + "- Event (Hat Block Opcode): {event}\n", + "- Associated Opcodes by Category: {json.dumps(opcodes_from_plan, indent=2)}\n", + "\n", + "── Game Context ──\n", + "Sprite: \"{sprite}\"\n", + "\n", + "── Current Plan ──\n", + "Event (hat block): {event}\n", + "Logic (pseudo-Scratch): {logic}\n", + "Plan : {plan}\n", + "\n", + "── Opcode Candidates ──\n", + "Motion: {opcodes_from_plan[\"motion\"]}\n", + "Control: {opcodes_from_plan[\"control\"]}\n", + "Operator: {opcodes_from_plan[\"operator\"]}\n", + "Sensing: {opcodes_from_plan[\"sensing\"]}\n", + "Looks: {opcodes_from_plan[\"looks\"]}\n", + "Sounds: {opcodes_from_plan[\"sounds\"]}\n", + "Events: {opcodes_from_plan[\"events\"]}\n", + "Data: {opcodes_from_plan[\"data\"]}\n", + "\n", + "── Your Task ──\n", + "1. Analyze the “Logic” steps and choose exactly which opcodes are needed. \n", + "2. Use exact opcodes from the given Scratch 3.0 Block Reference and verfiy the proper opcode are used available in the Scratch 3.0 Block Reference.\n", + "3. Return a top-level JSON object with a single key: \"opcode_counts\".\n", + "4. The value of \"opcode_counts\" should be a list of objects, where each object has \"opcode\": \"\" and \"count\": .\n", + "5. Ensure the list includes the hat block for this plan (e.g., event_whenflagclicked, event_whenkeypressed, event_whenbroadcastreceived) with a count of 1.\n", + "6. The order of opcodes within the \"opcode_counts\" list does not matter.\n", + "7. If any plan logic is None do not generate the opcode_counts for it.\n", + "8. 
Use only double quotes and ensure valid JSON.\n", + "\n", + "Example output:\n", + "**example 1**\n", + "```json \n", + "{{\n", + "\"opcode_counts\":[\n", + " {{\"opcode\":\"motion_gotoxy\",\"count\":1}},\n", + " {{\"opcode\":\"control_forever\",\"count\":1}},\n", + " {{\"opcode\":\"control_if\",\"count\":1}},\n", + " {{\"opcode\":\"looks_switchbackdropto\",\"count\":1}},\n", + " {{\"opcode\":\"event_whenflagclicked\",\"count\":1}},\n", + " {{\"opcode\":\"event_broadcast\",\"count\":1}},\n", + " {{\"opcode\":\"data_setvariableto\",\"count\":2}},\n", + " {{\"opcode\":\"data_showvariable\",\"count\":2}}\n", + " ] \n", + "}} \n", + "```\n", + "**example 2**\n", + "```json \n", + "{{ \n", + "\"opcode_counts\":[\n", + " {{\"opcode\":\"motion_gotoxy\",\"count\":1}},\n", + " {{\"opcode\":\"motion_glidesecstoxy\",\"count\":1}},\n", + " {{\"opcode\":\"motion_xposition\",\"count\":1}},\n", + " {{\"opcode\":\"motion_setx\",\"count\":1}},\n", + " {{\"opcode\":\"control_forever\",\"count\":1}},\n", + " {{\"opcode\":\"control_if\",\"count\":2}},\n", + " {{\"opcode\":\"operator_lt\",\"count\":1}},\n", + " {{\"opcode\":\"sensing_istouching\",\"count\":1}},\n", + " {{\"opcode\":\"event_whenflagclicked\",\"count\":1}},\n", + " {{\"opcode\":\"event_broadcast\",\"count\":1}}\n", + " ]\n", + "}}\n", + "```\n", + "\"\" \n", + " try:\n", + " response = agent.invoke({\"messages\": [{\"role\": \"user\", \"content\": refinement_prompt}]})\n", + " llm_output = response[\"messages\"][-1].content\n", + " llm_json = extract_json_from_llm_response(llm_output) \n", + " logger.info(f\"Successfully analyze the opcode requirement for {sprite} - {event}.\")\n", + "\n", + " except json.JSONDecodeError as error_json:\n", + " logger.error(f\"JSON Decode Error for {sprite} - {event}: {error_json}. Attempting correction.\")\n", + " correction_prompt = (\n", + " \"Your task is to correct the provided JSON string to ensure it is **syntactically perfect and adheres strictly to JSON rules**.\\n\"\n", + " \"It must be a JSON object with a single key `opcode_counts` containing a list of objects like {{'opcode': '', 'count': }}.\\n\"\n", + " f\"- **Error Details**: {error_json}\\n\\n\"\n", + " \"**Strict Instructions for your response:**\\n\"\n", + " \"1. **ONLY** output the corrected JSON. Do not include any other text or explanations.\\n\"\n", + " \"2. Ensure all keys and string values are enclosed in **double quotes**. Escape internal quotes (`\\\\`).\\n\"\n", + " \"3. No trailing commas. 
Correct nesting.\\n\\n\"\n", + " \"Here is the problematic JSON string to correct:\\n\"\n", + " f\"```json\\n{llm_output}\\n```\\n\"\n", + " \"Corrected JSON:\\n\"\n", + " )\n", + " try:\n", + " correction_response = agent_json_resolver.invoke({\"messages\": [{\"role\": \"user\", \"content\": correction_prompt}]})\n", + " llm_json = extract_json_from_llm_response(correction_response[\"messages\"][-1].content)\n", + " logger.info(f\"Successfully corrected JSON output for {sprite} - {event}.\")\n", + " except Exception as e_corr:\n", + " logger.error(f\"Failed to correct JSON output for {sprite} - {event} even after retry: {e_corr}\")\n", + " continue \n", + "\n", + " # Directly use the 'opcode_counts' list from the LLM's output\n", + " plan[\"opcode_counts\"] = llm_json.get(\"opcode_counts\", [])\n", + " \n", + " # Optionally, you can remove the individual category lists from the plan\n", + " # if they are no longer needed after the LLM provides the consolidated list.\n", + " # for key in [\"motion\", \"control\", \"operator\", \"sensing\", \"looks\", \"sounds\", \"events\", \"data\"]:\n", + " # if key in plan:\n", + " # del plan[key]\n", + "\n", + " refined_plans.append(plan)\n", + "\n", + " refined_flow[sprite] = {\n", + " \"description\": sprite_data.get(\"description\", \"),\n", + " \"plans\": refined_plans\n", + " }\n", + " \n", + " if refined_flow:\n", + " state[\"action_plan\"] = refined_flow\n", + " logger.info(\"logic aligned by logic_aligner_Node.\")\n", + " \n", + " state[\"temporary_node\"] = refined_flow\n", + " #state[\"temporary_node\"] = refined_flow\n", + " print(f\"[OPCODE COUTER LOGIC]: {refined_flow}\")\n", + " logger.info(\"=== OPCODE COUTER LOGIC completed ===\")\n", + " return state\n", + "\n", + "# Node 5: block_builder_node\n", + "def overall_block_builder_node_2(state: GameState):\n", + " logger.info(\"--- Running OverallBlockBuilderNode ---\")\n", + " print(\"--- Running OverallBlockBuilderNode ---\")\n", + " project_json = state[\"project_json\"]\n", + " targets = project_json[\"targets\"]\n", + " # --- Sprite and Stage Target Mapping ---\n", + " sprite_map = {target[\"name\"]: target for target in targets if not target[\"isStage\"]}\n", + " stage_target = next((target for target in targets if target[\"isStage\"]), None)\n", + " if stage_target:\n", + " sprite_map[stage_target[\"name\"]] = stage_target\n", + "\n", + " action_plan = state.get(\"action_plan\", {})\n", + " print(\"[Overall Action Plan received at the block generator]:\", json.dumps(action_plan, indent=2))\n", + " if not action_plan:\n", + " logger.warning(\"No action plan found in state. 
Skipping OverallBlockBuilderNode.\")\n", + " return state\n", + "\n", + " # Initialize offsets for script placement on the Scratch canvas\n", + " script_y_offset = {}\n", + " script_x_offset_per_sprite = {name: 0 for name in sprite_map.keys()}\n", + "\n", + " # This handles potential variations in the action_plan structure.\n", + " if action_plan.get(\"action_overall_flow\", {}) == {}:\n", + " plan_data = action_plan.items()\n", + " else:\n", + " plan_data = action_plan.get(\"action_overall_flow\", {}).items()\n", + "\n", + " # --- Extract global project context for LLM ---\n", + " all_sprite_names = list(sprite_map.keys())\n", + " all_variable_names = {}\n", + " all_list_names = {}\n", + " all_broadcast_messages = {}\n", + "\n", + " for target in targets:\n", + " for var_id, var_info in target.get(\"variables\", {}).items():\n", + " all_variable_names[var_info[0]] = var_id # Store name -> ID mapping (e.g., \"myVariable\": \"myVarId123\")\n", + " for list_id, list_info in target.get(\"lists\", {}).items():\n", + " all_list_names[list_info[0]] = list_id # Store name -> ID mapping\n", + " for broadcast_id, broadcast_name in target.get(\"broadcasts\", {}).items():\n", + " all_broadcast_messages[broadcast_name] = broadcast_id # Store name -> ID mapping\n", + "\n", + " # --- Process each sprite's action plan ---\n", + " for sprite_name, sprite_actions_data in plan_data:\n", + " if sprite_name in sprite_map:\n", + " current_sprite_target = sprite_map[sprite_name]\n", + " if \"blocks\" not in current_sprite_target:\n", + " current_sprite_target[\"blocks\"] = {}\n", + "\n", + " if sprite_name not in script_y_offset:\n", + " script_y_offset[sprite_name] = 0\n", + "\n", + " for plan_entry in sprite_actions_data.get(\"plans\", []):\n", + " logic_sequence = str(plan_entry[\"logic\"])\n", + " opcode_counts = plan_entry.get(\"opcode_counts\", {})\n", + " refined_indent_logic = format_scratch_pseudo_code(logic_sequence)\n", + " print(f\"\\n--------------------------- refined indent logic: {refined_indent_logic}-------------------------------\\n\")\n", + " try:\n", + " generated_blocks = block_builder(opcode_counts, refined_indent_logic)\n", + " \n", + " # Ensure generated_blocks is a dictionary\n", + " if not isinstance(generated_blocks, dict):\n", + " logger.error(f\"block_builder for sprite '{sprite_name}' returned non-dict type: {type(generated_blocks)}. Skipping block update.\")\n", + " continue # Skip to next plan_entry if output is not a dictionary\n", + "\n", + " if \"blocks\" in generated_blocks and isinstance(generated_blocks[\"blocks\"], dict):\n", + " logger.warning(f\"LLM returned nested 'blocks' key for {sprite_name}. 
Unwrapping.\")\n", + " generated_blocks = generated_blocks[\"blocks\"]\n", + "\n", + " # Update block positions for top-level script\n", + " for block_id, block_data in generated_blocks.items():\n", + " if block_data.get(\"topLevel\"):\n", + " block_data[\"x\"] = script_x_offset_per_sprite.get(sprite_name, 0)\n", + " block_data[\"y\"] = script_y_offset[sprite_name]\n", + " script_y_offset[sprite_name] += 150 # Increment for next script\n", + "\n", + " current_sprite_target[\"blocks\"].update(generated_blocks)\n", + " print(f\"[current_sprite_target block updated]: {current_sprite_target['blocks']}\")\n", + " state[\"iteration_count\"] = 0\n", + " logger.info(f\"Action blocks added for sprite '{sprite_name}' by OverallBlockBuilderNode.\")\n", + " except Exception as e:\n", + " logger.error(f\"Error generating blocks for sprite '{sprite_name}': {e}\")\n", + " # Consider adding more specific error handling here if a malformed output\n", + " # from block_builder should cause a specific state change, but generally\n", + " # avoid nulling the entire project_json.\n", + " \n", + " state[\"project_json\"] = project_json\n", + " # with open(\"debug_state.json\", \"w\", encoding=\"utf-8\") as f:\n", + " # json.dump(state, f, indent=2, ensure_ascii=False)\n", + " \n", + " return state\n", + "\n", + "# Node 6: variable adder node\n", + "def variable_adder_node(state: GameState):\n", + " project_json = state[\"project_json\"]\n", + " try:\n", + " updated_project_json = variable_adder_main(project_json)\n", + " if updated_project_json is not None:\n", + " print(\"Variable added inside the project successfully!\")\n", + " state[\"project_json\"]=updated_project_json\n", + " else:\n", + " print(\"Variable adder unable to add any variable inside the project!\")\n", + " state[\"project_json\"]=project_json\n", + " return state\n", + " except Exception as e:\n", + " logger.error(f\"Error in variable adder node while updating project_json': {e}\")\n", + " raise\n", + "\n", + "def delay_for_tpm_node(state: GameState):\n", + " logger.info(\"--- Running DelayForTPMNode ---\")\n", + " time.sleep(1) # Adjust the delay as needed\n", + " logger.info(\"Delay completed.\")\n", + " return state\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4b99c3eb", + "metadata": {}, + "outputs": [ + { + "ename": "NameError", + "evalue": "name 'StateGraph' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[31m---------------------------------------------------------------------------\u001b[39m", + "\u001b[31mNameError\u001b[39m Traceback (most recent call last)", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[1]\u001b[39m\u001b[32m, line 2\u001b[39m\n\u001b[32m 1\u001b[39m \u001b[38;5;66;03m# Build the LangGraph workflow\u001b[39;00m\n\u001b[32m----> \u001b[39m\u001b[32m2\u001b[39m workflow = \u001b[43mStateGraph\u001b[49m(GameState)\n\u001b[32m 4\u001b[39m \u001b[38;5;66;03m# Add all nodes to the workflow\u001b[39;00m\n\u001b[32m 5\u001b[39m workflow.add_node(\u001b[33m\"\u001b[39m\u001b[33mtime_delay_1\u001b[39m\u001b[33m\"\u001b[39m, delay_for_tpm_node)\n", + "\u001b[31mNameError\u001b[39m: name 'StateGraph' is not defined" + ] + } + ], + "source": [ + "# Build the LangGraph workflow\n", + "workflow = StateGraph(GameState)\n", + "\n", + "# Add all nodes to the workflow\n", + "workflow.add_node(\"time_delay_1\", delay_for_tpm_node)\n", + "workflow.add_node(\"pseudo_generator\", pseudo_generator_node)\n", + "workflow.add_node(\"page_processed\", 
processed_page_node)\n", + "\n", + "workflow.set_entry_point(\"time_delay_1\")\n", + "workflow.add_edge(\"time_delay_1\",\"pseudo_generator\")\n", + "workflow.add_edge(\"pseudo_generator\",\"page_processed\")\n", + "def decide_next_step(state: GameState):\n", + " if state.get(\"processing\", False):\n", + " return \"pseudo_generator\"\n", + " else:\n", + " return END\n", + "\n", + "workflow.add_conditional_edges(\n", + " \"page_processed\",\n", + " decide_next_step,\n", + " {\n", + " \"pseudo_generator\": \"pseudo_generator\", \n", + " \"END\": END \n", + " }\n", + ")\n", + "\n", + "app_graph = workflow.compile()\n", + "\n", + "initial_state_dict = {\n", + " \"project_json\": \"project_skeleton\",\n", + " \"description\": \"The pseudo code for the script\",\n", + " \"project_id\": 1,\n", + " # \"project_image\": img_b64,\n", + " \"project_image\": images,\n", + " \"action_plan\": {},\n", + " \"pseudo_code\": {},\n", + " \"temporary_node\": {},\n", + " \"processing\":True,\n", + " \"page_count\": 0,\n", + " \"temp_pseudo_code\":{},\n", + " }\n", + "\n", + " \n", + "final_state_dict = app_graph.invoke(initial_state_dict) # Pass dictionary" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "id": "9e0fdd1e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[{'refined_logic': {'name_variable': 'Bear',\n", + " 'pseudocode': 'when green flag clicked\\n show\\n switch backdrop to [Blue Sky v]\\n go to x: (-120) y: (-120)\\n broadcast [Game Start v]\\n forever\\nend\\n\\nwhen [down arrow v] key pressed\\n switch costume to [bend v]\\nend'}},\n", + " {'refined_logic': {'name_variable': 'Batter',\n", + " 'pseudocode': 'when green flag clicked\\n show\\n switch backdrop to [Blue Sky v]\\n go to x: (-120) y: (-120)\\n broadcast [Game Start v]\\n forever\\n if > then\\n switch costume to [Cat-a v]\\n wait (0.1) seconds\\n switch costume to [Cat-b v]\\n wait (0.1) seconds\\n end\\n end\\n when I receive [Game Over v]\\n hide\\n end'}},\n", + " {'refined_logic': {'name_variable': 'cat',\n", + " 'pseudocode': 'when [space v] key pressed\\n start sound [Zoop v]\\n glide (0.4) secs to x: (-120) y: (20)\\n glide (0.4) secs to x: (-120) y: (-120)\\nwhen I receive [Game Start v]\\n forever\\n hide\\n switch costume to (pick random (1) to (6))\\n go to x: (251) y: (-150)\\n show\\n glide (pick random (0.8) to (2)) secs to x: (-250) y: (-150)\\n end\\nend'}},\n", + " {'refined_logic': {'name_variable': 'Centaur',\n", + " 'pseudocode': 'when I receive [Game Over v]\\nstart sound [Ricochet v]\\nsay [Game Over] for (0.5) seconds\\nif > then\\nswitch costume to [Cat-a v]\\nwait (0.1) seconds\\nswitch costume to [Cat-b v]\\nwait (0.1) seconds\\nend\\nend'}},\n", + " {'refined_logic': {'name_variable': 'Beetle',\n", + " 'pseudocode': 'when green flag clicked\\n set [score v] to (0)\\n forever\\n wait (1) seconds\\n change [score v] by (1)\\nend'}},\n", + " {'refined_logic': {'name_variable': 'Beetle',\n", + " 'pseudocode': 'when I receive [Game Start v]\\nforever\\n if then\\n hide\\n switch backdrop to [loose v]\\n broadcast [Game Over v]\\n stop [all v]\\n end\\nend\\n'}}]" + ] + }, + "execution_count": 59, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "final_state_dict[\"pseudo_code\"]" + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "id": "5dfabcb9", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0\n", + "1\n", + "2\n", + "3\n", + "4\n", + "5\n" + ] + } + ], + "source": [ + "for i in 
range(6):\n", + " if i<6:\n", + " print(i)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "19f8b1bc", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "PDF has 6 pages\n" + ] + } + ], + "source": [ + "import tempfile\n", + "import io\n", + "from io import BytesIO\n", + "from pdf2image import convert_from_path, convert_from_bytes\n", + "\n", + "def convert_pdf_stream_to_images(pdf_stream: io.BytesIO, dpi=300):\n", + " # Ensure we are at the start of the stream\n", + " pdf_stream.seek(0)\n", + " \n", + " with tempfile.NamedTemporaryFile(delete=False, suffix=\".pdf\") as tmp_pdf:\n", + " tmp_pdf.write(pdf_stream.read())\n", + " tmp_pdf_path = tmp_pdf.name\n", + " \n", + " # Now use convert_from_path on the temp file\n", + " images = convert_from_path(tmp_pdf_path, dpi=dpi)\n", + " return images\n", + "\n", + "pdf_file_path = r\"D:\\DEV PATEL\\2025\\Scratch_Vision\\images\\code_blocks_with_script_for_written.pdf\"\n", + "with open(pdf_file_path, \"rb\") as pdf_file:\n", + " pdf_bytes = pdf_file.read()\n", + " \n", + "pdf_stream = io.BytesIO(pdf_bytes)\n", + "\n", + "\n", + "if isinstance(pdf_stream, io.BytesIO):\n", + " images = convert_pdf_stream_to_images(pdf_stream, dpi=300)\n", + "else:\n", + " images = convert_from_path(pdf_stream, dpi=300)\n", + "\n", + "num_pages = len(images)\n", + "print(f\"PDF has {num_pages} pages\")" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "07a42ad2", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "if 0\n", + "node processed 0\n", + "else 1\n" + ] + } + ], + "source": [ + "processing=True\n", + "i=0\n", + "while processing==True:\n", + " if i<1:\n", + " print(\"if\",i)\n", + " processing=True\n", + " print(\"node processed\",i)\n", + " i+=1\n", + " else: \n", + " processing=False\n", + " print(\"else\",i)" + ] + }, + { + "cell_type": "code", + "execution_count": 68, + "id": "2ed66744", + "metadata": {}, + "outputs": [ + { + "ename": "IndexError", + "evalue": "list index out of range", + "output_type": "error", + "traceback": [ + "\u001b[31m---------------------------------------------------------------------------\u001b[39m", + "\u001b[31mIndexError\u001b[39m Traceback (most recent call last)", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[68]\u001b[39m\u001b[32m, line 1\u001b[39m\n\u001b[32m----> \u001b[39m\u001b[32m1\u001b[39m \u001b[43mimages\u001b[49m\u001b[43m[\u001b[49m\u001b[32;43m6\u001b[39;49m\u001b[43m]\u001b[49m\n", + "\u001b[31mIndexError\u001b[39m: list index out of range" + ] + } + ], + "source": [ + "images[6]" + ] + }, + { + "cell_type": "code", + "execution_count": 74, + "id": "6f4d5d9b", + "metadata": {}, + "outputs": [], + "source": [ + "lit = [{1:2},{1:4},{4:6}]\n", + "lit +=[{7:2}]" + ] + }, + { + "cell_type": "code", + "execution_count": 75, + "id": "1ff63396", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[{1: 2}, {1: 4}, {4: 6}, {7: 2}]" + ] + }, + "execution_count": 75, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "lit" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6cb9ea0d", + "metadata": {}, + "outputs": [], + "source": [ + "def processed_page_node():\n", + " image = state.get(\"project_image\", \") \n", + " cnt =state[\"page_count\"]\n", + " print(f\"the page processed for page {cnt}\")\n", + " if cnt \u001b[39m\u001b[32m218\u001b[39m dist_matrix, filenames = 
\u001b[43mbuild_and_save_dreamsim_heatmap\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 219\u001b[39m \u001b[43m \u001b[49m\u001b[43mfolder\u001b[49m\u001b[43m=\u001b[49m\u001b[43mfolder\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 220\u001b[39m \u001b[43m \u001b[49m\u001b[43mmode\u001b[49m\u001b[43m=\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mfast\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[32m 221\u001b[39m \u001b[43m \u001b[49m\u001b[43mcache_dir\u001b[49m\u001b[43m=\u001b[49m\u001b[33;43mr\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mD:\u001b[39;49m\u001b[33;43m\\\u001b[39;49m\u001b[33;43mDEV PATEL\u001b[39;49m\u001b[33;43m\\\u001b[39;49m\u001b[33;43m2025\u001b[39;49m\u001b[33;43m\\\u001b[39;49m\u001b[33;43mscratch_VLM\u001b[39;49m\u001b[33;43m\\\u001b[39;49m\u001b[33;43mscratch_agent\u001b[39;49m\u001b[33;43m\\\u001b[39;49m\u001b[33;43m.cache\u001b[39;49m\u001b[33;43m\\\u001b[39;49m\u001b[33;43mdreamsim\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[32m 222\u001b[39m \u001b[43m \u001b[49m\u001b[43mpretrained\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[32m 223\u001b[39m \u001b[43m \u001b[49m\u001b[43mmax_images\u001b[49m\u001b[43m=\u001b[49m\u001b[32;43m200\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# or None\u001b[39;49;00m\n\u001b[32m 224\u001b[39m \u001b[43m \u001b[49m\u001b[43mout_png\u001b[49m\u001b[43m=\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mheatmap.png\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[32m 225\u001b[39m \u001b[43m \u001b[49m\u001b[43mout_csv\u001b[49m\u001b[43m=\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mdistances.csv\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[32m 226\u001b[39m \u001b[43m \u001b[49m\u001b[43muse_cuda\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# auto\u001b[39;49;00m\n\u001b[32m 227\u001b[39m \u001b[43m \u001b[49m\u001b[43mannotate\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[32m 228\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 229\u001b[39m \u001b[38;5;28mprint\u001b[39m(\u001b[33m\"\u001b[39m\u001b[33mDone. Saved heatmap and CSV. 
Matrix shape:\u001b[39m\u001b[33m\"\u001b[39m, dist_matrix.shape)\n", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[3]\u001b[39m\u001b[32m, line 184\u001b[39m, in \u001b[36mbuild_and_save_dreamsim_heatmap\u001b[39m\u001b[34m(folder, mode, cache_dir, pretrained, max_images, out_png, out_csv, use_cuda, annotate, model_preloaded)\u001b[39m\n\u001b[32m 182\u001b[39m model, preprocess = model_preloaded\n\u001b[32m 183\u001b[39m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[32m--> \u001b[39m\u001b[32m184\u001b[39m model, preprocess = \u001b[43mload_dreamsim_model\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpretrained\u001b[49m\u001b[43m=\u001b[49m\u001b[43mpretrained\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcache_dir\u001b[49m\u001b[43m=\u001b[49m\u001b[43mcache_dir\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 186\u001b[39m \u001b[38;5;66;03m# 3) device logic\u001b[39;00m\n\u001b[32m 187\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m use_cuda \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[3]\u001b[39m\u001b[32m, line 57\u001b[39m, in \u001b[36mload_dreamsim_model\u001b[39m\u001b[34m(pretrained, cache_dir)\u001b[39m\n\u001b[32m 53\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34mload_dreamsim_model\u001b[39m(pretrained: \u001b[38;5;28mbool\u001b[39m = \u001b[38;5;28;01mTrue\u001b[39;00m, cache_dir: \u001b[38;5;28mstr\u001b[39m = \u001b[33m\"\u001b[39m\u001b[33m~/.cache\u001b[39m\u001b[33m\"\u001b[39m):\n\u001b[32m 54\u001b[39m \u001b[38;5;250m \u001b[39m\u001b[33;03m\"\"\"\u001b[39;00m\n\u001b[32m 55\u001b[39m \u001b[33;03m Returns model, preprocess\u001b[39;00m\n\u001b[32m 56\u001b[39m \u001b[33;03m \"\"\"\u001b[39;00m\n\u001b[32m---> \u001b[39m\u001b[32m57\u001b[39m model, preprocess = \u001b[43mdreamsim\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpretrained\u001b[49m\u001b[43m=\u001b[49m\u001b[43mpretrained\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcache_dir\u001b[49m\u001b[43m=\u001b[49m\u001b[43mcache_dir\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 58\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m model, preprocess\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\dreamsim\\model.py:275\u001b[39m, in \u001b[36mdreamsim\u001b[39m\u001b[34m(pretrained, device, cache_dir, normalize_embeds, dreamsim_type, use_patch_model)\u001b[39m\n\u001b[32m 273\u001b[39m \u001b[38;5;66;03m# initialize PerceptualModel and load weights\u001b[39;00m\n\u001b[32m 274\u001b[39m model_list = dreamsim_args[\u001b[33m'\u001b[39m\u001b[33mmodel_config\u001b[39m\u001b[33m'\u001b[39m][dreamsim_type][\u001b[33m'\u001b[39m\u001b[33mmodel_type\u001b[39m\u001b[33m'\u001b[39m].split(\u001b[33m\"\u001b[39m\u001b[33m,\u001b[39m\u001b[33m\"\u001b[39m)\n\u001b[32m--> \u001b[39m\u001b[32m275\u001b[39m ours_model = \u001b[43mPerceptualModel\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mdreamsim_args\u001b[49m\u001b[43m[\u001b[49m\u001b[33;43m'\u001b[39;49m\u001b[33;43mmodel_config\u001b[39;49m\u001b[33;43m'\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m[\u001b[49m\u001b[43mdreamsim_type\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m=\u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mload_dir\u001b[49m\u001b[43m=\u001b[49m\u001b[43mcache_dir\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 
276\u001b[39m \u001b[43m \u001b[49m\u001b[43mnormalize_embeds\u001b[49m\u001b[43m=\u001b[49m\u001b[43mnormalize_embeds\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 278\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m dreamsim_type == \u001b[33m'\u001b[39m\u001b[33mensemble\u001b[39m\u001b[33m'\u001b[39m:\n\u001b[32m 279\u001b[39m tag = \u001b[33m'\u001b[39m\u001b[33mensemble_\u001b[39m\u001b[33m'\u001b[39m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\dreamsim\\model.py:65\u001b[39m, in \u001b[36mPerceptualModel.__init__\u001b[39m\u001b[34m(self, model_type, feat_type, stride, hidden_size, lora, baseline, load_dir, normalize_embeds, device, **kwargs)\u001b[39m\n\u001b[32m 62\u001b[39m \u001b[38;5;28mself\u001b[39m.baseline = baseline\n\u001b[32m 63\u001b[39m \u001b[38;5;28;01mfor\u001b[39;00m model_type, feat_type, stride \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mzip\u001b[39m(\u001b[38;5;28mself\u001b[39m.model_list, \u001b[38;5;28mself\u001b[39m.feat_type_list, \u001b[38;5;28mself\u001b[39m.stride_list):\n\u001b[32m 64\u001b[39m \u001b[38;5;28mself\u001b[39m.extractor_list.append(\n\u001b[32m---> \u001b[39m\u001b[32m65\u001b[39m \u001b[43mViTExtractor\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel_type\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstride\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mload_dir\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m=\u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 66\u001b[39m )\n\u001b[32m 67\u001b[39m extract_fn, num_feats = \u001b[38;5;28mself\u001b[39m._get_extract_fn(model_type, feat_type)\n\u001b[32m 68\u001b[39m \u001b[38;5;28mself\u001b[39m.extract_feats_list.append(extract_fn)\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\dreamsim\\feature_extraction\\extractor.py:48\u001b[39m, in \u001b[36mViTExtractor.__init__\u001b[39m\u001b[34m(self, model_type, stride, load_dir, device)\u001b[39m\n\u001b[32m 46\u001b[39m \u001b[38;5;28mself\u001b[39m.proj = \u001b[38;5;28mself\u001b[39m.model[\u001b[32m1\u001b[39m]\n\u001b[32m 47\u001b[39m \u001b[38;5;28mself\u001b[39m.model = \u001b[38;5;28mself\u001b[39m.model[\u001b[32m0\u001b[39m]\n\u001b[32m---> \u001b[39m\u001b[32m48\u001b[39m \u001b[38;5;28mself\u001b[39m.model = \u001b[43mViTExtractor\u001b[49m\u001b[43m.\u001b[49m\u001b[43mpatch_vit_resolution\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstride\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstride\u001b[49m\u001b[43m)\u001b[49m\u001b[43m.\u001b[49m\u001b[43meval\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m.\u001b[49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 49\u001b[39m \u001b[38;5;28mself\u001b[39m.p = \u001b[38;5;28mself\u001b[39m.model.patch_embed.patch_size\n\u001b[32m 50\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mtype\u001b[39m(\u001b[38;5;28mself\u001b[39m.p) \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28mtuple\u001b[39m:\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1355\u001b[39m, in \u001b[36mModule.to\u001b[39m\u001b[34m(self, *args, 
**kwargs)\u001b[39m\n\u001b[32m 1352\u001b[39m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[32m 1353\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m\n\u001b[32m-> \u001b[39m\u001b[32m1355\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43m_apply\u001b[49m\u001b[43m(\u001b[49m\u001b[43mconvert\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\torch\\nn\\modules\\module.py:915\u001b[39m, in \u001b[36mModule._apply\u001b[39m\u001b[34m(self, fn, recurse)\u001b[39m\n\u001b[32m 913\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m recurse:\n\u001b[32m 914\u001b[39m \u001b[38;5;28;01mfor\u001b[39;00m module \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m.children():\n\u001b[32m--> \u001b[39m\u001b[32m915\u001b[39m \u001b[43mmodule\u001b[49m\u001b[43m.\u001b[49m\u001b[43m_apply\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfn\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 917\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34mcompute_should_use_set_data\u001b[39m(tensor, tensor_applied):\n\u001b[32m 918\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m torch._has_compatible_shallow_copy_type(tensor, tensor_applied):\n\u001b[32m 919\u001b[39m \u001b[38;5;66;03m# If the new tensor has compatible tensor type as the existing tensor,\u001b[39;00m\n\u001b[32m 920\u001b[39m \u001b[38;5;66;03m# the current behavior is to change the tensor in-place using `.data =`,\u001b[39;00m\n\u001b[32m (...)\u001b[39m\u001b[32m 925\u001b[39m \u001b[38;5;66;03m# global flag to let the user control whether they want the future\u001b[39;00m\n\u001b[32m 926\u001b[39m \u001b[38;5;66;03m# behavior of overwriting the existing tensor or not.\u001b[39;00m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\torch\\nn\\modules\\module.py:915\u001b[39m, in \u001b[36mModule._apply\u001b[39m\u001b[34m(self, fn, recurse)\u001b[39m\n\u001b[32m 913\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m recurse:\n\u001b[32m 914\u001b[39m \u001b[38;5;28;01mfor\u001b[39;00m module \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m.children():\n\u001b[32m--> \u001b[39m\u001b[32m915\u001b[39m \u001b[43mmodule\u001b[49m\u001b[43m.\u001b[49m\u001b[43m_apply\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfn\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 917\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34mcompute_should_use_set_data\u001b[39m(tensor, tensor_applied):\n\u001b[32m 918\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m torch._has_compatible_shallow_copy_type(tensor, tensor_applied):\n\u001b[32m 919\u001b[39m \u001b[38;5;66;03m# If the new tensor has compatible tensor type as the existing tensor,\u001b[39;00m\n\u001b[32m 920\u001b[39m \u001b[38;5;66;03m# the current behavior is to change the tensor in-place using `.data =`,\u001b[39;00m\n\u001b[32m (...)\u001b[39m\u001b[32m 925\u001b[39m \u001b[38;5;66;03m# global flag to let the user control whether they want the future\u001b[39;00m\n\u001b[32m 926\u001b[39m \u001b[38;5;66;03m# behavior of overwriting the existing tensor or not.\u001b[39;00m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\torch\\nn\\modules\\module.py:942\u001b[39m, in \u001b[36mModule._apply\u001b[39m\u001b[34m(self, fn, recurse)\u001b[39m\n\u001b[32m 938\u001b[39m \u001b[38;5;66;03m# Tensors stored 
in modules are graph leaves, and we don't want to\u001b[39;00m\n\u001b[32m 939\u001b[39m \u001b[38;5;66;03m# track autograd history of `param_applied`, so we have to use\u001b[39;00m\n\u001b[32m 940\u001b[39m \u001b[38;5;66;03m# `with torch.no_grad():`\u001b[39;00m\n\u001b[32m 941\u001b[39m \u001b[38;5;28;01mwith\u001b[39;00m torch.no_grad():\n\u001b[32m--> \u001b[39m\u001b[32m942\u001b[39m param_applied = \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[43mparam\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 943\u001b[39m p_should_use_set_data = compute_should_use_set_data(param, param_applied)\n\u001b[32m 945\u001b[39m \u001b[38;5;66;03m# subclasses may have multiple child tensors so we need to use swap_tensors\u001b[39;00m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1341\u001b[39m, in \u001b[36mModule.to..convert\u001b[39m\u001b[34m(t)\u001b[39m\n\u001b[32m 1334\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m convert_to_format \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mand\u001b[39;00m t.dim() \u001b[38;5;129;01min\u001b[39;00m (\u001b[32m4\u001b[39m, \u001b[32m5\u001b[39m):\n\u001b[32m 1335\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m t.to(\n\u001b[32m 1336\u001b[39m device,\n\u001b[32m 1337\u001b[39m dtype \u001b[38;5;28;01mif\u001b[39;00m t.is_floating_point() \u001b[38;5;129;01mor\u001b[39;00m t.is_complex() \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[32m 1338\u001b[39m non_blocking,\n\u001b[32m 1339\u001b[39m memory_format=convert_to_format,\n\u001b[32m 1340\u001b[39m )\n\u001b[32m-> \u001b[39m\u001b[32m1341\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mt\u001b[49m\u001b[43m.\u001b[49m\u001b[43mto\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 1342\u001b[39m \u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1343\u001b[39m \u001b[43m \u001b[49m\u001b[43mdtype\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mt\u001b[49m\u001b[43m.\u001b[49m\u001b[43mis_floating_point\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mt\u001b[49m\u001b[43m.\u001b[49m\u001b[43mis_complex\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01melse\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[32m 1344\u001b[39m \u001b[43m \u001b[49m\u001b[43mnon_blocking\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1345\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1346\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mNotImplementedError\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[32m 1347\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mstr\u001b[39m(e) == \u001b[33m\"\u001b[39m\u001b[33mCannot copy out of meta tensor; no data!\u001b[39m\u001b[33m\"\u001b[39m:\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\torch\\cuda\\__init__.py:363\u001b[39m, in \u001b[36m_lazy_init\u001b[39m\u001b[34m()\u001b[39m\n\u001b[32m 358\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[32m 359\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mCannot re-initialize CUDA in forked subprocess. 
To use CUDA with \u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m 360\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mmultiprocessing, you must use the \u001b[39m\u001b[33m'\u001b[39m\u001b[33mspawn\u001b[39m\u001b[33m'\u001b[39m\u001b[33m start method\u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m 361\u001b[39m )\n\u001b[32m 362\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mhasattr\u001b[39m(torch._C, \u001b[33m\"\u001b[39m\u001b[33m_cuda_getDeviceCount\u001b[39m\u001b[33m\"\u001b[39m):\n\u001b[32m--> \u001b[39m\u001b[32m363\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mAssertionError\u001b[39;00m(\u001b[33m\"\u001b[39m\u001b[33mTorch not compiled with CUDA enabled\u001b[39m\u001b[33m\"\u001b[39m)\n\u001b[32m 364\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m _cudart \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[32m 365\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mAssertionError\u001b[39;00m(\n\u001b[32m 366\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mlibcudart functions unavailable. It looks like you have a broken build?\u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m 367\u001b[39m )\n", + "\u001b[31mAssertionError\u001b[39m: Torch not compiled with CUDA enabled" + ] + } + ], + "source": [ + "#!/usr/bin/env python3\n", + "\"\"\n", + "dreamsim_heatmap_fn.py\n", + "\n", + "Callables to compute and save a DreamSim pairwise distance heatmap for images\n", + "in a folder — function-based (no argparse).\n", + "\n", + "Depends on:\n", + " pip install dreamsim pillow numpy matplotlib seaborn scikit-learn tqdm pandas\n", + "\"\"\n", + "\n", + "import os\n", + "import glob\n", + "from typing import Optional, Tuple, List, Union\n", + "\n", + "from PIL import Image\n", + "from tqdm import tqdm\n", + "\n", + "import numpy as np\n", + "import torch\n", + "from sklearn.metrics import pairwise_distances\n", + "import matplotlib.pyplot as plt\n", + "import seaborn as sns\n", + "import pandas as pd\n", + "\n", + "# DreamSim import (per their docs)\n", + "from dreamsim import dreamsim\n", + "\n", + "\n", + "# -----------------------\n", + "# Utilities\n", + "# -----------------------\n", + "def find_images(folder: str, exts: Tuple[str, ...] 
= (\"*.png\", \"*.jpg\", \"*.jpeg\", \"*.bmp\", \"*.tiff\")) -> List[str]:\n", + " paths = []\n", + " for e in exts:\n", + " paths.extend(glob.glob(os.path.join(folder, e)))\n", + " paths = sorted(paths)\n", + " return paths\n", + "\n", + "\n", + "def load_and_preprocess(path: str, preprocess, convert_rgb: bool = True) -> torch.Tensor:\n", + " img = Image.open(path)\n", + " if convert_rgb:\n", + " img = img.convert(\"RGB\")\n", + " t = preprocess(img)\n", + " if not isinstance(t, torch.Tensor):\n", + " t = torch.tensor(t)\n", + " if t.dim() == 3:\n", + " t = t.unsqueeze(0)\n", + " return t\n", + "\n", + "\n", + "def load_dreamsim_model(pretrained: bool = True, cache_dir: str = \"~/.cache\"):\n", + " \"\"\n", + " Returns model, preprocess\n", + " \"\"\n", + " model, preprocess = dreamsim(pretrained=pretrained, cache_dir=cache_dir)\n", + " return model, preprocess\n", + "\n", + "\n", + "def compute_embeddings_for_paths(\n", + " paths: List[str],\n", + " preprocess,\n", + " model,\n", + " device: torch.device,\n", + " max_images: Optional[int] = None,\n", + ") -> Tuple[np.ndarray, List[str]]:\n", + " if max_images:\n", + " paths = paths[:max_images]\n", + " emb_list = []\n", + " names = []\n", + " model.eval()\n", + " with torch.no_grad():\n", + " for p in tqdm(paths, desc=\"Preprocessing & embedding\"):\n", + " t = load_and_preprocess(p, preprocess).to(device)\n", + " emb = model.embed(t)\n", + " # handle tuple/list return\n", + " if isinstance(emb, (list, tuple)):\n", + " emb = emb[0]\n", + " emb = emb.squeeze(0).cpu().numpy()\n", + " emb_list.append(emb)\n", + " names.append(os.path.basename(p))\n", + " embeddings = np.vstack(emb_list)\n", + " return embeddings, names\n", + "\n", + "\n", + "def compute_pairwise_from_embeddings(embeddings: np.ndarray, metric: str = \"cosine\") -> np.ndarray:\n", + " return pairwise_distances(embeddings, metric=metric)\n", + "\n", + "\n", + "def compute_pairwise_exact_for_paths(\n", + " paths: List[str],\n", + " preprocess,\n", + " model,\n", + " device: torch.device,\n", + " max_images: Optional[int] = None,\n", + ") -> Tuple[np.ndarray, List[str]]:\n", + " if max_images:\n", + " paths = paths[:max_images]\n", + " n = len(paths)\n", + " dist = np.zeros((n, n), dtype=float)\n", + " tensors = []\n", + " for p in tqdm(paths, desc=\"Preprocessing\"):\n", + " t = load_and_preprocess(p, preprocess).to(device)\n", + " tensors.append(t)\n", + "\n", + " model.eval()\n", + " with torch.no_grad():\n", + " for i in tqdm(range(n), desc=\"Pairwise exact\"):\n", + " for j in range(n):\n", + " d = model(tensors[i], tensors[j])\n", + " if isinstance(d, torch.Tensor):\n", + " d = d.item()\n", + " dist[i, j] = float(d)\n", + " names = [os.path.basename(p) for p in paths]\n", + " return dist, names\n", + "\n", + "\n", + "def plot_and_save_heatmap(\n", + " dist_matrix: np.ndarray,\n", + " names: List[str],\n", + " out_png: str = \"distance_heatmap.png\",\n", + " out_csv: str = \"distance_matrix.csv\",\n", + " annotate: bool = False,\n", + "):\n", + " df = pd.DataFrame(dist_matrix, index=names, columns=names)\n", + " df.to_csv(out_csv)\n", + "\n", + " plt.figure(figsize=(max(6, len(names) * 0.6), max(6, len(names) * 0.6)))\n", + " sns.set_context(\"talk\")\n", + " ax = sns.heatmap(\n", + " df,\n", + " xticklabels=names,\n", + " yticklabels=names,\n", + " square=False,\n", + " cmap=\"viridis\",\n", + " cbar_kws={\"label\": \"distance\"},\n", + " annot=annotate if len(names) <= 20 else False,\n", + " fmt=\".3f\",\n", + " linewidths=0.2,\n", + " )\n", + " 
plt.xticks(rotation=90)\n", + " plt.yticks(rotation=0)\n", + " plt.tight_layout()\n", + " plt.savefig(out_png, dpi=200)\n", + " plt.close()\n", + "\n", + "\n", + "# -----------------------\n", + "# High-level function (call this)\n", + "# -----------------------\n", + "def build_and_save_dreamsim_heatmap(\n", + " folder: str,\n", + " mode: str = \"fast\", # \"fast\" or \"exact\"\n", + " cache_dir: str = \"~/.cache\",\n", + " pretrained: bool = True,\n", + " max_images: Optional[int] = None,\n", + " out_png: str = \"distance_heatmap.png\",\n", + " out_csv: str = \"distance_matrix.csv\",\n", + " use_cuda: Optional[bool] = None, # None => auto-detect\n", + " annotate: bool = False,\n", + " model_preloaded: Optional[Tuple] = None, # (model, preprocess) if already loaded\n", + ") -> Tuple[np.ndarray, List[str]]:\n", + " \"\"\n", + " Compute pairwise distance heatmap for images in `folder` and save results.\n", + "\n", + " Returns:\n", + " dist_matrix (np.ndarray): shape (N,N)\n", + " names (List[str]): list of basenames in same order\n", + " \"\"\n", + " # 1) find images\n", + " paths = find_images(folder)\n", + " if len(paths) == 0:\n", + " raise FileNotFoundError(f\"No images found in: {folder}\")\n", + "\n", + " # optional truncate\n", + " if max_images:\n", + " paths = paths[:max_images]\n", + "\n", + " # 2) load model (or use provided)\n", + " if model_preloaded is not None:\n", + " model, preprocess = model_preloaded\n", + " else:\n", + " model, preprocess = load_dreamsim_model(pretrained=pretrained, cache_dir=cache_dir)\n", + "\n", + " # 3) device logic\n", + " if use_cuda is None:\n", + " use_cuda = torch.cuda.is_available()\n", + " device = torch.device(\"cuda\" if use_cuda else \"cpu\")\n", + " try:\n", + " model.to(device)\n", + " except Exception:\n", + " # some models may not support .to()\n", + " pass\n", + "\n", + " # 4) compute distances\n", + " if mode == \"fast\":\n", + " embeddings, names = compute_embeddings_for_paths(paths, preprocess, model, device, max_images=None)\n", + " dist = compute_pairwise_from_embeddings(embeddings, metric=\"cosine\")\n", + " elif mode == \"exact\":\n", + " dist, names = compute_pairwise_exact_for_paths(paths, preprocess, model, device, max_images=None)\n", + " else:\n", + " raise ValueError(\"mode must be 'fast' or 'exact'\")\n", + "\n", + " # 5) plot + save\n", + " plot_and_save_heatmap(dist, names, out_png=out_png, out_csv=out_csv, annotate=annotate)\n", + "\n", + " # 6) return for programmatic use\n", + " return dist, names\n", + "\n", + "\n", + "# -----------------------\n", + "# Example usage\n", + "# -----------------------\n", + "if __name__ == \"__main__\":\n", + " # Example: call the function directly (no argparse)\n", + " folder = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\test_files\" # change to your folder\n", + " dist_matrix, filenames = build_and_save_dreamsim_heatmap(\n", + " folder=folder,\n", + " mode=\"fast\",\n", + " cache_dir=r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\.cache\\dreamsim\",\n", + " pretrained=True,\n", + " max_images=200, # or None\n", + " out_png=\"heatmap.png\",\n", + " out_csv=\"distances.csv\",\n", + " use_cuda=None, # auto\n", + " annotate=False,\n", + " )\n", + " print(\"Done. Saved heatmap and CSV. 
Matrix shape:\", dist_matrix.shape)\n" + ] + }, + { + "cell_type": "markdown", + "id": "1ff59006", + "metadata": {}, + "source": [ + "# Embedding code here" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "bcb855dd", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\tqdm\\auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n", + "Using a slow image processor as `use_fast` is unset and a slow processor was saved with this model. `use_fast=True` will be the default behavior in v4.52, even if the model was saved with a slow processor. This will result in minor differences in outputs. You'll still be able to use a slow processor with `use_fast=False`.\n" + ] + } + ], + "source": [ + "# save as dinov2_similarity.py\n", + "import os\n", + "from PIL import Image\n", + "import torch\n", + "from transformers import AutoImageProcessor, AutoModel\n", + "import numpy as np\n", + "from sklearn.neighbors import NearestNeighbors\n", + "\n", + "# --- Config ---\n", + "MODEL = \"facebook/dinov2-small\" # small = faster on CPU; also available: dinov2-base, dinov2-large\n", + "BATCH_SIZE = 8\n", + "DEVICE = torch.device(\"cpu\")\n", + "\n", + "# --- Load model & processor ---\n", + "processor = AutoImageProcessor.from_pretrained(MODEL)\n", + "model = AutoModel.from_pretrained(MODEL)\n", + "model.eval().to(DEVICE)\n", + "\n", + "# --- Helpers ---\n", + "def embed_images(paths, batch_size=BATCH_SIZE):\n", + " \"\"\"\n", + " Returns: numpy array shape (N, D) of L2-normalized embeddings (float32)\n", + " \"\"\"\n", + " embeddings = []\n", + " imgs = []\n", + " for p in paths:\n", + " with Image.open(p) as img:\n", + " imgs.append(img.convert(\"RGB\"))\n", + " # process in batches\n", + " for i in range(0, len(imgs), batch_size):\n", + " batch_imgs = imgs[i : i + batch_size]\n", + " inputs = processor(images=batch_imgs, return_tensors=\"pt\")\n", + " inputs = {k: v.to(DEVICE) for k, v in inputs.items()}\n", + " with torch.no_grad():\n", + " out = model(**inputs)\n", + " cls = out.last_hidden_state[:, 0, :] # (B, D)\n", + " cls = torch.nn.functional.normalize(cls, p=2, dim=1)\n", + " embeddings.append(cls.cpu().numpy())\n", + " return np.vstack(embeddings)\n", + "\n", + "def build_index(embeddings, metric=\"cosine\"):\n", + " \"\"\"\n", + " Build exact NearestNeighbors index (brute force / efficient BLAS).\n", + " Returns trained sklearn NearestNeighbors object.\n", + " \"\"\"\n", + " nn = NearestNeighbors(n_neighbors=10, metric=metric, algorithm=\"brute\")\n", + " nn.fit(embeddings)\n", + " return nn\n", + "\n", + "def query_image(path, nn_index, paths_list, top_k=5):\n", + " emb = embed_images([path])\n", + " dists, idxs = nn_index.kneighbors(emb, n_neighbors=top_k)\n", + " results = []\n", + " for dist, idx in zip(dists[0], idxs[0]):\n", + " sim = 1 - dist # cosine similarity proxy\n", + " results.append((paths_list[idx], float(sim)))\n", + " return results" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "497546f4", + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "from transformers import AutoImageProcessor, AutoModel\n", + "import numpy as np\n", + "from PIL import Image\n", + "\n", + "# --- Config (tune threads as needed) ---\n", + "DINOV2_MODEL = \"facebook/dinov2-small\" # 
small = best CPU latency/quality tradeoff\n", + "DEVICE = torch.device(\"cpu\")\n", + "torch.set_num_threads(4) # tune for your CPU\n", + "\n", + "# --- Globals for single-shot model load ---\n", + "_dinov2_processor = None\n", + "_dinov2_model = None\n", + "\n", + "\n", + "def init_dinov2(model_name: str = DINOV2_MODEL, device: torch.device = DEVICE):\n", + " \"\"\"\n", + " Lazy-initialize DINOv2 processor & model (call once before embedding).\n", + " \"\"\"\n", + " global _dinov2_processor, _dinov2_model\n", + " if _dinov2_processor is None or _dinov2_model is None:\n", + " _dinov2_processor = AutoImageProcessor.from_pretrained(model_name)\n", + " _dinov2_model = AutoModel.from_pretrained(model_name)\n", + " _dinov2_model.eval().to(device)\n", + "\n", + "\n", + "def embed_bytesio_list(bytesio_list, batch_size: int = 8):\n", + " \"\"\"\n", + " Accepts a list of BytesIO objects (each contains an image).\n", + " Returns: np.ndarray shape (N, D) of L2-normalized embeddings (dtype float32).\n", + " \"\"\"\n", + " if _dinov2_processor is None or _dinov2_model is None:\n", + " init_dinov2()\n", + "\n", + " imgs = []\n", + " for b in bytesio_list:\n", + " with Image.open(b) as img:\n", + " imgs.append(img.convert(\"RGB\").copy()) # copy ensures file handle is closed\n", + "\n", + " embs = []\n", + " for i in range(0, len(imgs), batch_size):\n", + " batch = imgs[i : i + batch_size]\n", + " inputs = _dinov2_processor(images=batch, return_tensors=\"pt\")\n", + " inputs = {k: v.to(DEVICE) for k, v in inputs.items()}\n", + " with torch.no_grad():\n", + " out = _dinov2_model(**inputs)\n", + " cls = out.last_hidden_state[:, 0, :] # (B, D)\n", + " cls = torch.nn.functional.normalize(cls, p=2, dim=1)\n", + " embs.append(cls.cpu().numpy())\n", + "\n", + " if not embs:\n", + " return np.zeros((0, _dinov2_model.config.hidden_size), dtype=np.float32)\n", + "\n", + " return np.vstack(embs).astype(np.float32)\n", + "\n", + "\n", + "def l2_normalize_rows(a: np.ndarray, eps: float = 1e-12) -> np.ndarray:\n", + " \"\"\"\n", + " Row-wise L2 normalization for numpy arrays.\n", + " \"\"\"\n", + " norm = np.linalg.norm(a, axis=1, keepdims=True)\n", + " return a / (norm + eps)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "2a28592c", + "metadata": {}, + "outputs": [], + "source": [ + "from pathlib import Path\n", + "import os\n", + "\n", + "BASE_DIR = Path(r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\")\n", + "BLOCKS_DIR = BASE_DIR / \"blocks\"\n", + "STATIC_DIR = BASE_DIR / \"static\"\n", + "GEN_PROJECT_DIR = BASE_DIR / \"generated_projects\"\n", + "BACKDROP_DIR = BLOCKS_DIR / \"Backdrops\"\n", + "SPRITE_DIR = BLOCKS_DIR / \"sprites\"\n", + "CODE_BLOCKS_DIR = BLOCKS_DIR / \"code_blocks\"\n", + "OUTPUT_DIR = BASE_DIR / \"outputs\"\n", + "\n", + "for d in (\n", + " BLOCKS_DIR,\n", + " STATIC_DIR,\n", + " GEN_PROJECT_DIR,\n", + " BACKDROP_DIR,\n", + " SPRITE_DIR,\n", + " CODE_BLOCKS_DIR,\n", + " OUTPUT_DIR,\n", + " # DETECTED_IMAGE_DIR,\n", + " # SCANNED_IMAGE_DIR,\n", + " # JSON_DIR,\n", + "):\n", + " d.mkdir(parents=True, exist_ok=True)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "d4bbc69b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\n" + ] + } + ], + "source": [ + "print(BACKDROP_DIR)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8838f96d", + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + 
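"\n",
+    "# --- Editorial note (added): this cell repeats the DINOv2 helpers defined earlier\n",
+    "# (init_dinov2, embed_bytesio_list, l2_normalize_rows) and adds a first draft of\n",
+    "# regenerate_reference_embeddings; a hardened version with an atomic write and a\n",
+    "# count check is defined in a later cell. Hypothetical usage sketch (the glob and\n",
+    "# output filename below are placeholders, not the notebook's actual inputs):\n",
+    "#   image_paths = sorted(SPRITE_DIR.rglob(\"*.png\"))\n",
+    "#   regenerate_reference_embeddings(image_paths, BLOCKS_DIR / \"embeddings.json\")\n",
+    "\n",
+    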
"from transformers import AutoImageProcessor, AutoModel\n", + "import numpy as np\n", + "from PIL import Image\n", + "\n", + "# --- Config (tune threads as needed) ---\n", + "DINOV2_MODEL = \"facebook/dinov2-small\" # small = best CPU latency/quality tradeoff\n", + "DEVICE = torch.device(\"cpu\")\n", + "torch.set_num_threads(4) # tune for your CPU\n", + "\n", + "# --- Globals for single-shot model load ---\n", + "_dinov2_processor = None\n", + "_dinov2_model = None\n", + "\n", + "\n", + "def init_dinov2(model_name: str = DINOV2_MODEL, device: torch.device = DEVICE):\n", + " \"\"\"\n", + " Lazy-initialize DINOv2 processor & model (call once before embedding).\n", + " \"\"\"\n", + " global _dinov2_processor, _dinov2_model\n", + " if _dinov2_processor is None or _dinov2_model is None:\n", + " _dinov2_processor = AutoImageProcessor.from_pretrained(model_name)\n", + " _dinov2_model = AutoModel.from_pretrained(model_name)\n", + " _dinov2_model.eval().to(device)\n", + "\n", + "\n", + "def embed_bytesio_list(bytesio_list, batch_size: int = 8):\n", + " \"\"\"\n", + " Accepts a list of BytesIO objects (each contains an image).\n", + " Returns: np.ndarray shape (N, D) of L2-normalized embeddings (dtype float32).\n", + " \"\"\"\n", + " if _dinov2_processor is None or _dinov2_model is None:\n", + " init_dinov2()\n", + "\n", + " imgs = []\n", + " for b in bytesio_list:\n", + " with Image.open(b) as img:\n", + " imgs.append(img.convert(\"RGB\").copy()) # copy ensures file handle is closed\n", + "\n", + " embs = []\n", + " for i in range(0, len(imgs), batch_size):\n", + " batch = imgs[i : i + batch_size]\n", + " inputs = _dinov2_processor(images=batch, return_tensors=\"pt\")\n", + " inputs = {k: v.to(DEVICE) for k, v in inputs.items()}\n", + " with torch.no_grad():\n", + " out = _dinov2_model(**inputs)\n", + " cls = out.last_hidden_state[:, 0, :] # (B, D)\n", + " cls = torch.nn.functional.normalize(cls, p=2, dim=1)\n", + " embs.append(cls.cpu().numpy())\n", + "\n", + " if not embs:\n", + " return np.zeros((0, _dinov2_model.config.hidden_size), dtype=np.float32)\n", + "\n", + " return np.vstack(embs).astype(np.float32)\n", + "\n", + "\n", + "def l2_normalize_rows(a: np.ndarray, eps: float = 1e-12) -> np.ndarray:\n", + " \"\"\"\n", + " Row-wise L2 normalization for numpy arrays.\n", + " \"\"\"\n", + " norm = np.linalg.norm(a, axis=1, keepdims=True)\n", + " return a / (norm + eps)\n", + "\n", + "from pathlib import Path\n", + "from io import BytesIO\n", + "import json\n", + "\n", + "def regenerate_reference_embeddings(folder_image_paths, out_path):\n", + " init_dinov2()\n", + "\n", + " bytes_list = []\n", + " valid_paths = []\n", + "\n", + " for p in folder_image_paths:\n", + " p = Path(p) # convert str to Path\n", + " if not p.exists():\n", + " print(f\"Missing file: {p}\")\n", + " continue\n", + " with open(p, \"rb\") as f:\n", + " b = BytesIO(f.read())\n", + " b.seek(0)\n", + " bytes_list.append(b)\n", + " valid_paths.append(p)\n", + "\n", + " embs = embed_bytesio_list(bytes_list, batch_size=8) # (M, D)\n", + "\n", + " out_json = []\n", + " for i, p in enumerate(valid_paths):\n", + " out_json.append({\n", + " \"path\": str(p), # ensure string in JSON\n", + " \"embeddings\": embs[i].tolist()\n", + " })\n", + "\n", + " with open(out_path, \"w\") as f:\n", + " json.dump(out_json, f)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "07ddfa4b", + "metadata": {}, + "outputs": [], + "source": [ + "from pathlib import Path\n", + "from io import BytesIO\n", + "import json\n", + "import 
traceback\n", + "\n", + "def regenerate_reference_embeddings(folder_image_paths, out_path, sort_inputs=True, batch_size=8):\n", + " \"\"\"\n", + " Create JSON file `out_path` containing list of {\"path\": \"...\", \"embeddings\": [...]}\n", + " folder_image_paths : iterable of file paths (strings or Path)\n", + " Returns number of saved embeddings.\n", + " \"\"\"\n", + " # ensure model is loaded\n", + " init_dinov2()\n", + "\n", + " # Normalize and optionally sort input list for deterministic ordering\n", + " paths_in = [Path(p) for p in folder_image_paths]\n", + " if sort_inputs:\n", + " paths_in = sorted(paths_in, key=lambda p: p.name)\n", + "\n", + " bytes_list = []\n", + " valid_paths = []\n", + "\n", + " # Read files into BytesIO, but skip unreadable files\n", + " for p in paths_in:\n", + " if not p.exists():\n", + " print(f\"[skipped] missing file: {p}\")\n", + " continue\n", + " try:\n", + " with open(p, \"rb\") as f:\n", + " b = BytesIO(f.read())\n", + " b.seek(0)\n", + " bytes_list.append(b)\n", + " valid_paths.append(p)\n", + " except Exception as e:\n", + " print(f\"[skipped] failed to read {p}: {e}\")\n", + " traceback.print_exc()\n", + "\n", + " if not valid_paths:\n", + " raise RuntimeError(\"No valid images found to embed.\")\n", + "\n", + " # Embed (this returns shape (M, D))\n", + " embs = embed_bytesio_list(bytes_list, batch_size=batch_size)\n", + "\n", + " # Defensive check: counts must match\n", + " if embs.shape[0] != len(valid_paths):\n", + " # Print diagnostics and raise — this protects against silent misalignment.\n", + " print(\"ERROR: embeddings count != valid paths count\")\n", + " print(\"valid_paths:\", len(valid_paths))\n", + " print(\"embs.shape[0]:\", embs.shape[0])\n", + " # Optionally write partial diagnostic file for debugging\n", + " raise RuntimeError(\"Embeddings count does not match number of valid input files. 
Aborting.\")\n", + "\n", + " # Prepare JSON output\n", + " out_json = []\n", + " for i, p in enumerate(valid_paths):\n", + " out_json.append({\n", + " \"path\": str(p),\n", + " \"embedding\": embs[i].tolist()\n", + " })\n", + "\n", + " # Write atomically (write to tmp then move)\n", + " tmp_out = Path(out_path).with_suffix(\".tmp.json\")\n", + " with open(tmp_out, \"w\", encoding=\"utf-8\") as f:\n", + " json.dump(out_json, f, ensure_ascii=False)\n", + " tmp_out.replace(Path(out_path)) # atomic on most OSes\n", + "\n", + " print(f\"[ok] saved {len(out_json)} embeddings -> {out_path}\")\n", + " return len(out_json)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "4cb7e1ba", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[skipped] missing file: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Arctic.sb3\\8eb8790be5507fdccf73e7c1570bbbab.png\n", + "[ok] saved 983 embeddings -> D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\embed2.json\n" + ] + }, + { + "data": { + "text/plain": [ + "983" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import cv2, json,base64,io,os,tempfile,logging, re\n", + "from io import BytesIO\n", + "folder_image_paths = [\n", + "SPRITE_DIR / \"Abby.sprite3\" / \"34a175600dc009a521eb46fdbbbeeb67.png\"\n", + ",SPRITE_DIR / \"Abby.sprite3\" / \"45de34b47a2ce22f6f5d28bb35a44ff5.png\"\n", + ",SPRITE_DIR / \"Abby.sprite3\" / \"809d9b47347a6af2860e7a3a35bce057.png\"\n", + ",SPRITE_DIR / \"Abby.sprite3\" / \"920f14335615fff9b8c55fccb8971984.png\"\n", + ",SPRITE_DIR / \"Amon.sprite3\" / \"60f720956ab1840431dcf0616ce98f14.png\"\n", + ",SPRITE_DIR / \"Andie.sprite3\" / \"b36584db82bdd45014430aa918461ca0.png\"\n", + ",SPRITE_DIR / \"Andie.sprite3\" / \"b3fc774e753fef520fb544127a48554b.png\"\n", + ",SPRITE_DIR / \"Andie.sprite3\" / \"d92aaf6cf44921905d51ca4a10a4f3d6.png\"\n", + ",SPRITE_DIR / \"Andie.sprite3\" / \"ded71c8a0f39852178f1695b622c2d89.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"105f4f3d260dcb8bea02ea9ee5d18cf4.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"2d208a34e74fdce9dab9d4c585dcfa2b.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"3948aad16f8169c013c956dd152a09a6.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"4931a363e3e4efa20230f6ff2991c6b4.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"62c50c90535b64f2ae130a5c680ddcb4.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"7bb9c790b02231e1272701167c26b17a.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"84c5e22b4303c7c1fb707125706c9aaa.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"b7693bd6250d4411ee622b67f8025924.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"ca27e001a263ee6b5852508f39d021db.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"d86bb27b4f8d7b70c39c96f29c6943b4.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"db6c03113f71b91f22a9f3351f90e5bf.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"e3698b76cb0864df2fbaba80e6bd8067.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"ed90e8b7a05c1552194af597ac0637cd.png\"\n", + ",SPRITE_DIR / \"Apple.sprite3\" / \"3826a4091a33e4d26f87a2fac7cf796b.png\"\n", + ",SPRITE_DIR / \"Arrow1.sprite3\" / \"65b8e977641885010a10a46512fb95b4.png\"\n", + ",SPRITE_DIR / \"Arrow1.sprite3\" / \"70ffa0bae8693418459f21f370584f6d.png\"\n", + ",SPRITE_DIR / \"Arrow1.sprite3\" / \"be8fcd10da0b082f8d4775088ef7bd52.png\"\n", + ",SPRITE_DIR / 
\"Arrow1.sprite3\" / \"dafcdfda65af14e172809984710f31a9.png\"\n", + ",SPRITE_DIR / \"Avery Walking.sprite3\" / \"3a935fe75ac999e22b93d06b3081a271.png\"\n", + ",SPRITE_DIR / \"Avery Walking.sprite3\" / \"448e54fb14b13d492885fc247e76b7f4.png\"\n", + ",SPRITE_DIR / \"Avery Walking.sprite3\" / \"8f439476a738251043d488d7a4bc6870.png\"\n", + ",SPRITE_DIR / \"Avery Walking.sprite3\" / \"dc6a584704c09a3fbafb9825635a9fd4.png\"\n", + ",SPRITE_DIR / \"Avery.sprite3\" / \"944385ea927e8f9d72b9e19620487999.png\"\n", + ",SPRITE_DIR / \"Avery.sprite3\" / \"f52bde34d8027aab14b53f228fe5cc14.png\"\n", + ",SPRITE_DIR / \"Ball.sprite3\" / \"1c44b7494dec047371f74c705f1d99fc.png\"\n", + ",SPRITE_DIR / \"Ball.sprite3\" / \"3c6241985b581284ec191f9d1deffde8.png\"\n", + ",SPRITE_DIR / \"Ball.sprite3\" / \"ad7dc51cafd73e8279073e33b0eab335.png\"\n", + ",SPRITE_DIR / \"Ball.sprite3\" / \"db144b2a19f4f1ab31e30d58f00447dc.png\"\n", + ",SPRITE_DIR / \"Ball.sprite3\" / \"f221a2edf87aff3615c0c003e616b31b.png\"\n", + ",SPRITE_DIR / \"Ballerina.sprite3\" / \"4ccb1752a43f48aafe490c9c08e58c27.png\"\n", + ",SPRITE_DIR / \"Ballerina.sprite3\" / \"5197d3778baf55da6b81b3ada1e10021.png\"\n", + ",SPRITE_DIR / \"Ballerina.sprite3\" / \"5aae21aee33c3f1ae943af5ea11254bf.png\"\n", + ",SPRITE_DIR / \"Ballerina.sprite3\" / \"fc02bf591dd3d91eeeb50c7424d08274.png\"\n", + ",SPRITE_DIR / \"Balloon1.sprite3\" / \"63e5aea255610f9fdf0735e1e9a55a5c.png\"\n", + ",SPRITE_DIR / \"Balloon1.sprite3\" / \"a2516ac2b8d7a348194908e630387ea9.png\"\n", + ",SPRITE_DIR / \"Balloon1.sprite3\" / \"d7974f9e15000c16222f94ee32d8227a.png\"\n", + ",SPRITE_DIR / \"Bananas.sprite3\" / \"e5d3d3eb61797f5999732a8f5efead24.png\"\n", + ",SPRITE_DIR / \"Baseball.sprite3\" / \"74e08fc57820f925c7689e7b754c5848.png\"\n", + ",SPRITE_DIR / \"Basketball.sprite3\" / \"6b0b2aaa12d655e96b5b34e92d9fbd4f.png\"\n", + ",SPRITE_DIR / \"Bat.sprite3\" / \"4e4ced87ed37ee66c758bba077e0eae6.png\"\n", + ",SPRITE_DIR / \"Bat.sprite3\" / \"60f5bfce5d9b11bfcd199a6aa5454b3f.png\"\n", + ",SPRITE_DIR / \"Bat.sprite3\" / \"698c2a48e774f9959d57c9618b156c20.png\"\n", + ",SPRITE_DIR / \"Bat.sprite3\" / \"bc6dd12fc9e407c7774959cdf427f8b5.png\"\n", + ",SPRITE_DIR / \"Batter.sprite3\" / \"592ee9ab2aeefe65cb4fb95fcd046f33.png\"\n", + ",SPRITE_DIR / \"Batter.sprite3\" / \"9d193bef6e3d6d8eba6d1470b8bf9351.png\"\n", + ",SPRITE_DIR / \"Batter.sprite3\" / \"baseball_sprite_motion_1.png\"\n", + ",SPRITE_DIR / \"Batter.sprite3\" / \"bd4fc003528acfa847e45ff82f346eee.png\"\n", + ",SPRITE_DIR / \"Batter.sprite3\" / \"fdfde4bcbaca0f68e83fdf3f4ef0c660.png\"\n", + ",SPRITE_DIR / \"Beachball.sprite3\" / \"5198b5a03ebae60698e0906f59a5fc15.png\"\n", + ",SPRITE_DIR / \"Bear-walking.sprite3\" / \"0a38a860f2e573b8dc5b09f390d30fbd.png\"\n", + ",SPRITE_DIR / \"Bear-walking.sprite3\" / \"36d06aa23c684fc996952adb0e76e6b4.png\"\n", + ",SPRITE_DIR / \"Bear-walking.sprite3\" / \"6d4d06e3f4cd0c9455b777b9a40782b6.png\"\n", + ",SPRITE_DIR / \"Bear-walking.sprite3\" / \"6d50c5fe63ab5f77d10144a68ca535a6.png\"\n", + ",SPRITE_DIR / \"Bear-walking.sprite3\" / \"7453709bef16e33e6f989aee14d7fc07.png\"\n", + ",SPRITE_DIR / \"Bear-walking.sprite3\" / \"d2a5f124f988def1d214e6d0813a48f3.png\"\n", + ",SPRITE_DIR / \"Bear-walking.sprite3\" / \"e531b307381c2aa148be4ccc36db0333.png\"\n", + ",SPRITE_DIR / \"Bear-walking.sprite3\" / \"f36c80d2e731be95df7ec6d07f89fa00.png\"\n", + ",SPRITE_DIR / \"Bear.sprite3\" / \"6f303e972f33fcb7ef36d0d8012d0975.png\"\n", + ",SPRITE_DIR / \"Bear.sprite3\" / \"bear_motion_2.png\"\n", + ",SPRITE_DIR / \"Bear.sprite3\" / 
\"deef1eaa96d550ae6fc11524a1935024.png\"\n", + ",SPRITE_DIR / \"Beetle.sprite3\" / \"46d0dfd4ae7e9bfe3a6a2e35a4905eae.png\"\n", + ",SPRITE_DIR / \"Bell.sprite3\" / \"8c0234fe1bfd36f5a72e975fbbc18bfd.png\"\n", + ",SPRITE_DIR / \"Ben.sprite3\" / \"165d993c30dfdb9e829d0d98867d7826.png\"\n", + ",SPRITE_DIR / \"Ben.sprite3\" / \"2cd77b8a9961e7ad4da905e7731b7c1b.png\"\n", + ",SPRITE_DIR / \"Ben.sprite3\" / \"9f9f88aea3457084d8d734040b0b9067.png\"\n", + ",SPRITE_DIR / \"Ben.sprite3\" / \"acc208e29f0422c2bcffa3b8873abc63.png\"\n", + ",SPRITE_DIR / \"Block-A.sprite3\" / \"ef3b01f6fc1ffa1270fbbf057f7ded42.png\"\n", + ",SPRITE_DIR / \"Block-B.sprite3\" / \"1dc05fbaa37a6b41ffff459d0a776989.png\"\n", + ",SPRITE_DIR / \"Block-C.sprite3\" / \"43090c4b423c977041542ce12017fda0.png\"\n", + ",SPRITE_DIR / \"Block-D.sprite3\" / \"1fb3db31500d6f7da662e825157920fa.png\"\n", + ",SPRITE_DIR / \"Block-E.sprite3\" / \"240aacc04444cef3b2ef8cfaf0dae479.png\"\n", + ",SPRITE_DIR / \"Block-F.sprite3\" / \"d88d750ce848d7dbeeca3f02249350e2.png\"\n", + ",SPRITE_DIR / \"Block-G.sprite3\" / \"989c76ae7f8c2e42ebeacdda961061ca.png\"\n", + ",SPRITE_DIR / \"Block-H.sprite3\" / \"93426b2f313d1bdedff368d94fc989d6.png\"\n", + ",SPRITE_DIR / \"Block-I.sprite3\" / \"f911b18605f59c75adf4d83e07811fd8.png\"\n", + ",SPRITE_DIR / \"Block-J.sprite3\" / \"8580c990ac918577550165447f870542.png\"\n", + ",SPRITE_DIR / \"Block-K.sprite3\" / \"d93a9fd4bfb5bc1e9790945fa756b748.png\"\n", + ",SPRITE_DIR / \"Block-L.sprite3\" / \"579c90cbaf847e9adf4faf37f340b32d.png\"\n", + ",SPRITE_DIR / \"Block-M.sprite3\" / \"6c5cf1fd0673f441b04e15e799685831.png\"\n", + ",SPRITE_DIR / \"Block-N.sprite3\" / \"9eba5dd44d65e1d421c40686fecde906.png\"\n", + ",SPRITE_DIR / \"Block-O.sprite3\" / \"8bbbde09c13a06015e554ab36fa178c0.png\"\n", + ",SPRITE_DIR / \"Block-P.sprite3\" / \"0f920b99ac49421cf28e55c8d863bdc5.png\"\n", + ",SPRITE_DIR / \"Block-Q.sprite3\" / \"67f8e80eabaec4883eb9c67c9527004a.png\"\n", + ",SPRITE_DIR / \"Block-R.sprite3\" / \"9d0432c5575451e251990d89845f8d00.png\"\n", + ",SPRITE_DIR / \"Block-S.sprite3\" / \"83c7486b08e78d099b4e776aaa2783fe.png\"\n", + ",SPRITE_DIR / \"Block-T.sprite3\" / \"6c1b26611ec0483f601a648f59305aff.png\"\n", + ",SPRITE_DIR / \"Block-U.sprite3\" / \"d02f77994789f528f0aaa7f211690151.png\"\n", + ",SPRITE_DIR / \"Block-V.sprite3\" / \"0654cfcb6234406837336e90be7e419c.png\"\n", + ",SPRITE_DIR / \"Block-W.sprite3\" / \"2b3145ae89c32793c4fcea9a6bcc6075.png\"\n", + ",SPRITE_DIR / \"Block-X.sprite3\" / \"a73f354dc045bbbc5a491d9367192a80.png\"\n", + ",SPRITE_DIR / \"Block-Y.sprite3\" / \"e13e79f106d32a3176dbcf5c1b35827d.png\"\n", + ",SPRITE_DIR / \"Block-Z.sprite3\" / \"c57d371b291d43675f46601518098572.png\"\n", + ",SPRITE_DIR / \"Bowl.sprite3\" / \"d147f16e3e2583719c073ac5b55fe3ca.png\"\n", + ",SPRITE_DIR / \"Bowtie.sprite3\" / \"4b032ba44b8077439e73815542e7ed23.png\"\n", + ",SPRITE_DIR / \"Bread.sprite3\" / \"585de1550446d4420f8a10fdecac995b.png\"\n", + ",SPRITE_DIR / \"Broom.sprite3\" / \"556288a1c996345c751a3dc88b570cfa.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"148034b1557cc3dae39953e43ab50ff0.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"4212ff1769c169bfa0db043b18fdade8.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"80b120b7152ed72fded84fef485f4f79.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"8f64966be60d332b345598819c67a8b6.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"a8c977a3b85ffe8c8b453c9d668989b8.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / 
\"bb47a3d5d03a34937557c558c6cb5d18.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"d1fcce0aac589a17324943a3b759fc2a.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"e4764cfc384a499f92da3ea745bcebe2.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"e8c9508b1f6a0a432e09c10ef9ada67c.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"fcedb6b25a2db6de28b39130f978b0bf.png\"\n", + ",SPRITE_DIR / \"Butterfly 1.sprite3\" / \"34b76c1835c6a7fc2c47956e49bb0f52.png\"\n", + ",SPRITE_DIR / \"Butterfly 1.sprite3\" / \"49c9f952007d870a046cff93b6e5e098.png\"\n", + ",SPRITE_DIR / \"Butterfly 1.sprite3\" / \"fe98df7367e314d9640bfaa54fc239be.png\"\n", + ",SPRITE_DIR / \"Butterfly 2.sprite3\" / \"372ae0abd2e8e50a20bc12cb160d8746.png\"\n", + ",SPRITE_DIR / \"Butterfly 2.sprite3\" / \"e96f4c6913107c9b790d37bb65507c14.png\"\n", + ",SPRITE_DIR / \"Button1.sprite3\" / \"21fb7fa07eac4794fded0be4e18e20a2.png\"\n", + ",SPRITE_DIR / \"Button2.sprite3\" / \"329bf3d86050ceaea2b27e2c5d2baec1.png\"\n", + ",SPRITE_DIR / \"Button2.sprite3\" / \"af4cd54e776031bc9cc54ddd6892f97b.png\"\n", + ",SPRITE_DIR / \"Button3.sprite3\" / \"5021f6b7d166873ef0711c4d4a351912.png\"\n", + ",SPRITE_DIR / \"Button3.sprite3\" / \"a3b357ea21773bcb3545a227ee877e9a.png\"\n", + ",SPRITE_DIR / \"Button4.sprite3\" / \"71ced7c192168c7b221d16b4eaff440e.png\"\n", + ",SPRITE_DIR / \"Button4.sprite3\" / \"7d34ad26633abbc752c9cd93ace0a81f.png\"\n", + ",SPRITE_DIR / \"Button5.sprite3\" / \"94957f2f79e8970d8b2cd0f74a0c1ffc.png\"\n", + ",SPRITE_DIR / \"Button5.sprite3\" / \"a4bb9a9e06e65337798471035719985a.png\"\n", + ",SPRITE_DIR / \"Cake.sprite3\" / \"862488bf66b67c5330cae9235b853b6e.png\"\n", + ",SPRITE_DIR / \"Cake.sprite3\" / \"dfe9c5d40da0dcc386fad524c36d3579.png\"\n", + ",SPRITE_DIR / \"Calvrett.sprite3\" / \"452683db3ad7a882f5ab9de496441592.png\"\n", + ",SPRITE_DIR / \"Calvrett.sprite3\" / \"728ec1ebc275b53809023a36c66eeaa3.png\"\n", + ",SPRITE_DIR / \"Casey.sprite3\" / \"50bd5162671b8a30fcfa3082a9e79ec4.png\"\n", + ",SPRITE_DIR / \"Casey.sprite3\" / \"e09e5ef2bdeb69163a543f3216c1f54c.png\"\n", + ",SPRITE_DIR / \"Casey.sprite3\" / \"e5a47371f3e9f853b36560cda35344b6.png\"\n", + ",SPRITE_DIR / \"Casey.sprite3\" / \"ebc3de539e02801d420268eb189c5a47.png\"\n", + ",SPRITE_DIR / \"Cassy Dance.sprite3\" / \"63483bbf72fc55719918a335e1a16426.png\"\n", + ",SPRITE_DIR / \"Cassy Dance.sprite3\" / \"6cb3686db1fa658b6541cc9fa3ccfcc7.png\"\n", + ",SPRITE_DIR / \"Cassy Dance.sprite3\" / \"aca39a47cf3affd8a83d3287d2856c29.png\"\n", + ",SPRITE_DIR / \"Cassy Dance.sprite3\" / \"f801cec764da5ef6374e1d557296d14e.png\"\n", + ",SPRITE_DIR / \"Cat 2.sprite3\" / \"7499cf6ec438d0c7af6f896bc6adc294.png\"\n", + ",SPRITE_DIR / \"Cat Flying.sprite3\" / \"6667936a2793aade66c765c329379ad0.png\"\n", + ",SPRITE_DIR / \"Cat Flying.sprite3\" / \"a1ab94c8172c3b97ed9a2bf7c32172cd.png\"\n", + ",SPRITE_DIR / \"Cat.sprite3\" / \"0fb9be3e8397c983338cb71dc84d0b25.png\"\n", + ",SPRITE_DIR / \"Cat.sprite3\" / \"bcf454acf82e4504149f7ffe07081dbc.png\"\n", + ",SPRITE_DIR / \"Catcher.sprite3\" / \"895cdda4f2bd9d6f50ff07188e7ce395.png\"\n", + ",SPRITE_DIR / \"Catcher.sprite3\" / \"8aa875f077c405e2045f5ab60705e712.png\"\n", + ",SPRITE_DIR / \"Catcher.sprite3\" / \"99af13802e9bfd7b4a4bfb8ead825c0c.png\"\n", + ",SPRITE_DIR / \"Catcher.sprite3\" / \"a31e30677637ae4de975d40b6d822853.png\"\n", + ",SPRITE_DIR / \"Centaur.sprite3\" / \"2373556e776cad3ba4d6ee04fc34550b.png\"\n", + ",SPRITE_DIR / \"Centaur.sprite3\" / \"c00ffa6c5dd0baf9f456b897ff974377.png\"\n", + ",SPRITE_DIR / \"Centaur.sprite3\" 
/ \"d722329bd9373ad80625e5be6d52f3ed.png\"\n", + ",SPRITE_DIR / \"Centaur.sprite3\" / \"d7aa990538915b7ef1f496d7e8486ade.png\"\n", + ",SPRITE_DIR / \"Champ99.sprite3\" / \"20318b14a332fd618ec91e7c1de8be9a.png\"\n", + ",SPRITE_DIR / \"Champ99.sprite3\" / \"26fdff424232926001d20041c3d5673b.png\"\n", + ",SPRITE_DIR / \"Champ99.sprite3\" / \"56f3220fa82d99dcfc7d27d433ed01e4.png\"\n", + ",SPRITE_DIR / \"Champ99.sprite3\" / \"68453506ae4b6b60a3fc6817ba39d492.png\"\n", + ",SPRITE_DIR / \"Champ99.sprite3\" / \"7b073f47fbd9421e0d60daacc157f506.png\"\n", + ",SPRITE_DIR / \"Champ99.sprite3\" / \"a28ffc2b129fb359ff22c79c48341267.png\"\n", + ",SPRITE_DIR / \"Champ99.sprite3\" / \"d6ae13605610aa008d48b0c8b25a57d3.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"03bc23a9fa12c1244c83a07a81f20bfd.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"0f18f9e90d0ed68ebec23da087eb2603.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"1044a68cc743f83564e36a6bca16830b.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"1e303bb57aac0cb4678e85de4251f3f4.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"527ba82c5e82f43c8fca0be905dbe20a.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"5e2f620e5687a36e1954414054c69ccc.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"6be261800647c53becb1f93ed31ed13e.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"6d5ddfc69f9c6a3f1d2ded1428237931.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"6f78ce6a87d114162ed9fbef30f9a0fd.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"984043e1e7c544999c31f952d1d43a56.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"b37d0e0d46f07cb2cbdc5285e176bf62.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"cc0be722cf93eef63726bd606ab11c5c.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"f26b130c2c58b812be21d1a9745863a1.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"1cf73a791959e07b5bafe18474f93b78.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"67d425b11544caa0fe9228f355c6485b.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"7084b3baab935de819cc5ab46f7cecf8.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"93e035270675f933b94ee951d7e475e3.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"bf0d808f7bf0c11c338b4fea0a735874.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"db3f436fcb6fb28828a4c932b60feb5e.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"df7cbf2913bcea721df2e0360644f193.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"e0eacf1e575adc559c41e3a81a892168.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"e8b44b0e904fd4bb7430c26b743f1520.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"f4f2778df2840de5a6449a49f3efb599.png\"\n", + ",SPRITE_DIR / \"Cheesy Puffs.sprite3\" / \"82772a61ec74974e84c686c61ea0b7d5.png\"\n", + ",SPRITE_DIR / \"Chick.sprite3\" / \"5e23c8c28ffd390df7deb2414be37781.png\"\n", + ",SPRITE_DIR / \"Chick.sprite3\" / \"77911bbe5e11ede35871e8002a26356d.png\"\n", + ",SPRITE_DIR / \"Chick.sprite3\" / \"80abbc427366bca477ccf1ef0faf240a.png\"\n", + ",SPRITE_DIR / \"City Bus.sprite3\" / \"7d7e26014a346b894db8ab1819f2167f.png\"\n", + ",SPRITE_DIR / \"City Bus.sprite3\" / \"e9694adbff9422363e2ea03166015393.png\"\n", + ",SPRITE_DIR / \"Cloud.sprite3\" / \"c9630e30e59e4565e785a26f58568904.png\"\n", + ",SPRITE_DIR / \"Clouds.sprite3\" / \"0188b2c7c85176b462881c6bca7a7748.png\"\n", + ",SPRITE_DIR / \"Clouds.sprite3\" / \"9105d7dd90b5f2a4b85a1e71aff8703f.png\"\n", + ",SPRITE_DIR / \"Clouds.sprite3\" 
/ \"9f2eccce13e3e5fd212efd59ff1d96a0.png\"\n", + ",SPRITE_DIR / \"Clouds.sprite3\" / \"9f5958f46d21e33d3f6d7caffbe0daa9.png\"\n", + ",SPRITE_DIR / \"Convertible 2.sprite3\" / \"621817ef84ad81f5690fac95adab2ede.png\"\n", + ",SPRITE_DIR / \"Convertible.sprite3\" / \"5b883f396844ff5cfecd7c95553fa4fb.png\"\n", + ",SPRITE_DIR / \"Crab.sprite3\" / \"49839aa1b0feed02a3c759db5f8dee71.png\"\n", + ",SPRITE_DIR / \"Crab.sprite3\" / \"f7cdd2acbc6d7559d33be8675059c79e.png\"\n", + ",SPRITE_DIR / \"Crystal.sprite3\" / \"0a7b872042cecaf30cc154c0144f002b.png\"\n", + ",SPRITE_DIR / \"Crystal.sprite3\" / \"ecd1e7805b37db4caf207b7eef2b7a42.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"05529eb3c09294bd15f57c6f10d5894e.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"12db59633a1709a2c39534d35263791f.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"19bd7995d37e3baade673b2fe7cb982b.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"32ec7b5332cfebd1cfed7f6b79c76e67.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"344384a6a3f1bdf494cc7af31e928d36.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"3cdebabdb41f6c3e84561cf3ea87bac3.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"50faf1630ea383c0b8c77f70a9329797.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"70da166596bb484eae1bfbaad5c03d54.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"729812366245c0dafd456339c9d94e08.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"a22da98e5e63de7b2883355afd0184f0.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"a4b5d644d9abdbcab236acf19b2a2e81.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"dafbdfe454c5ec7029b5c1e07fcabc90.png\"\n", + ",SPRITE_DIR / \"Dan.sprite3\" / \"307250744e230fb15e7062238bf2634c.png\"\n", + ",SPRITE_DIR / \"Dan.sprite3\" / \"89b55d049f4b3811676311df00681385.png\"\n", + ",SPRITE_DIR / \"Dani.sprite3\" / \"2cba86439098a7e0daa46e0ff8a59f7c.png\"\n", + ",SPRITE_DIR / \"Dani.sprite3\" / \"6518333c95cf96a9aaf73a4a948e002f.png\"\n", + ",SPRITE_DIR / \"Dani.sprite3\" / \"b5f989e21b56af371209369c331b821e.png\"\n", + ",SPRITE_DIR / \"Dee.sprite3\" / \"1de3bbee2771b0ff16c4658d5ad98b0b.png\"\n", + ",SPRITE_DIR / \"Dee.sprite3\" / \"320a892c86e9b039ba9d6d50a4897276.png\"\n", + ",SPRITE_DIR / \"Dee.sprite3\" / \"43bd4c241a94b3aea883472d7dab5afc.png\"\n", + ",SPRITE_DIR / \"Dee.sprite3\" / \"c57c4593701165cdea6de9b014c7c06d.png\"\n", + ",SPRITE_DIR / \"Dee.sprite3\" / \"e4c6ada3509f7033d14bac2c0eea49dc.png\"\n", + ",SPRITE_DIR / \"Devin.sprite3\" / \"5ab51aeaa296e955e75a7a3c103ebb99.png\"\n", + ",SPRITE_DIR / \"Devin.sprite3\" / \"5f614017dba0ce6bff063f6c62041035.png\"\n", + ",SPRITE_DIR / \"Devin.sprite3\" / \"9d7414a719d6cc5e0e9071ede200a29c.png\"\n", + ",SPRITE_DIR / \"Devin.sprite3\" / \"bfc7c20b64f86d4b207780f3da695fa4.png\"\n", + ",SPRITE_DIR / \"Dinosaur1.sprite3\" / \"22d94ee5daf557284465425a61186234.png\"\n", + ",SPRITE_DIR / \"Dinosaur1.sprite3\" / \"45b02fbd582c15a50e1953830b59b377.png\"\n", + ",SPRITE_DIR / \"Dinosaur1.sprite3\" / \"7f89417968116ada83d4ddaad22403b3.png\"\n", + ",SPRITE_DIR / \"Dinosaur1.sprite3\" / \"af158d368bf3da576369be1130e18acd.png\"\n", + ",SPRITE_DIR / \"Dinosaur2.sprite3\" / \"0e43f8e573bf232505b207b92efac2ac.png\"\n", + ",SPRITE_DIR / \"Dinosaur2.sprite3\" / \"7799f2848136d11f48ca5f3105d336ef.png\"\n", + ",SPRITE_DIR / \"Dinosaur2.sprite3\" / \"d926c5758d130fcfd9a7ae7dac47e47d.png\"\n", + ",SPRITE_DIR / \"Dinosaur2.sprite3\" / \"e606ba27dfe94daf3d8e3fdf599e37cf.png\"\n", + ",SPRITE_DIR / 
\"Dinosaur3.sprite3\" / \"5381feb0fc1b50ddc2793342daddffef.png\"\n", + ",SPRITE_DIR / \"Dinosaur3.sprite3\" / \"ae98efa1c3c3700602e1344db86aaf72.png\"\n", + ",SPRITE_DIR / \"Dinosaur3.sprite3\" / \"cf4fb77a4e9839f83d3fa5fc0982ccd3.png\"\n", + ",SPRITE_DIR / \"Dinosaur3.sprite3\" / \"d85ec1b97f73564ef26fec73d5056c68.png\"\n", + ",SPRITE_DIR / \"Dinosaur3.sprite3\" / \"e731d1f1ebf4bc0ea55b850ffe5a5f96.png\"\n", + ",SPRITE_DIR / \"Dinosaur4.sprite3\" / \"723bd1559f8baae4184fa24a6513362b.png\"\n", + ",SPRITE_DIR / \"Dinosaur4.sprite3\" / \"a98e3f93853513e7c00bab4c61752312.png\"\n", + ",SPRITE_DIR / \"Dinosaur4.sprite3\" / \"ac99ef62e3e018b8db550bb2a187cbe9.png\"\n", + ",SPRITE_DIR / \"Dinosaur4.sprite3\" / \"c63cca929380152b978d8671fe6003f7.png\"\n", + ",SPRITE_DIR / \"Dinosaur5.sprite3\" / \"26fca11e4251d60ed7aa5d08f4ae2a69.png\"\n", + ",SPRITE_DIR / \"Dinosaur5.sprite3\" / \"3b2cf97b1cc7fc535162ba5849a0e29c.png\"\n", + ",SPRITE_DIR / \"Dinosaur5.sprite3\" / \"42e3bf118c775ba54239af4276800a0a.png\"\n", + ",SPRITE_DIR / \"Dinosaur5.sprite3\" / \"5882227a9e2f0f3b2014c49328969762.png\"\n", + ",SPRITE_DIR / \"Dinosaur5.sprite3\" / \"5a0832162a0cfa7adab6090c42e89714.png\"\n", + ",SPRITE_DIR / \"Dinosaur5.sprite3\" / \"9d200a7c2e93eac8cf52ede3a87d7969.png\"\n", + ",SPRITE_DIR / \"Dinosaur5.sprite3\" / \"c4044a3badea77ced4f2db69aff866ed.png\"\n", + ",SPRITE_DIR / \"Dinosaur5.sprite3\" / \"f49b3b098a24474f20c8f4686681c611.png\"\n", + ",SPRITE_DIR / \"Diver1.sprite3\" / \"a24f23a0f5d77cfb59721ef8f6bfe5c7.png\"\n", + ",SPRITE_DIR / \"Diver2.sprite3\" / \"ef8136a42b7d20961756e551bc87b37f.png\"\n", + ",SPRITE_DIR / \"Dog1.sprite3\" / \"35cd78a8a71546a16c530d0b2d7d5a7f.png\"\n", + ",SPRITE_DIR / \"Dog1.sprite3\" / \"d5a72e1eb23a91df4b53c0b16493d1e6.png\"\n", + ",SPRITE_DIR / \"Dog2.sprite3\" / \"4708bff29b3a295a03ac1d5e2d16ec75.png\"\n", + ",SPRITE_DIR / \"Dog2.sprite3\" / \"66b435d333f34d02d5ae49a598bcc5b3.png\"\n", + ",SPRITE_DIR / \"Dog2.sprite3\" / \"6afc06388d69f99e28d883126f9b2734.png\"\n", + ",SPRITE_DIR / \"Donut.sprite3\" / \"316a67c9e966fd015b4538f54be456db.png\"\n", + ",SPRITE_DIR / \"Dorian.sprite3\" / \"603d3dd151984c0eaa2822f70a234c28.png\"\n", + ",SPRITE_DIR / \"Dorian.sprite3\" / \"7d20ec98603857c031c1f4ad2bd8ea51.png\"\n", + ",SPRITE_DIR / \"Dorian.sprite3\" / \"8f2be2387efcbb5d4878886adaa2a88e.png\"\n", + ",SPRITE_DIR / \"Dorian.sprite3\" / \"a9a064a1f28c9e22b594dcea1d46025b.png\"\n", + ",SPRITE_DIR / \"Dot.sprite3\" / \"106461f60e34ce231b323e2dd2d9f05b.png\"\n", + ",SPRITE_DIR / \"Dot.sprite3\" / \"21482022f9930400302bc8ec70643717.png\"\n", + ",SPRITE_DIR / \"Dot.sprite3\" / \"9e5a6cc6970ce4932a09affba70a45b0.png\"\n", + ",SPRITE_DIR / \"Dot.sprite3\" / \"fb047c94113ee4c6664305a338525e6a.png\"\n", + ",SPRITE_DIR / \"Dove.sprite3\" / \"0f83ab55012a7affd94e38250d55a0a0.png\"\n", + ",SPRITE_DIR / \"Dove.sprite3\" / \"778a699a044a0a8c10f44c3194e21ef2.png\"\n", + ",SPRITE_DIR / \"Dragon.sprite3\" / \"12ead885460d96a19132e5970839d36d.png\"\n", + ",SPRITE_DIR / \"Dragon.sprite3\" / \"3f672475ad4ca5d1f9331cffd4223140.png\"\n", + ",SPRITE_DIR / \"Dragon.sprite3\" / \"e0aa0083fa0b97da97600d4dbb2055e5.png\"\n", + ",SPRITE_DIR / \"Dragonfly.sprite3\" / \"17b864c1ddd4b349a6c4bd5709167307.png\"\n", + ",SPRITE_DIR / \"Dragonfly.sprite3\" / \"5cdfe67af929e3fb095e83c9c4b0bd78.png\"\n", + ",SPRITE_DIR / \"Dress.sprite3\" / \"4e22e6fd72500f0a25b959283bfd0a32.png\"\n", + ",SPRITE_DIR / \"Dress.sprite3\" / \"c5fb135d89573570010b0d96c94bcec6.png\"\n", + ",SPRITE_DIR / \"Dress.sprite3\" / 
\"ddbea537af6012ebac18d16d65c07479.png\"\n", + ",SPRITE_DIR / \"Drum Kit.sprite3\" / \"3f4fb4836338c55f883607c403b2b25e.png\"\n", + ",SPRITE_DIR / \"Drum Kit.sprite3\" / \"baf6344b6f55b074786a383c1097697d.png\"\n", + ",SPRITE_DIR / \"Drum-cymbal.sprite3\" / \"08355ec8cc4b3263f502adfdea993cda.png\"\n", + ",SPRITE_DIR / \"Drum-cymbal.sprite3\" / \"78398692e6fa226568df0374c4358da4.png\"\n", + ",SPRITE_DIR / \"Drum-highhat.sprite3\" / \"15b2a31a57d0cd911ad0b1c265dcf59e.png\"\n", + ",SPRITE_DIR / \"Drum-highhat.sprite3\" / \"866b3a49ee2a45998940e2d737c4c502.png\"\n", + ",SPRITE_DIR / \"Drum-snare.sprite3\" / \"28298d93f5282041267a92bd67308107.png\"\n", + ",SPRITE_DIR / \"Drum-snare.sprite3\" / \"c42bb05aab3cacddcd88712e33ab8df0.png\"\n", + ",SPRITE_DIR / \"Drum.sprite3\" / \"47531b5675be696d0540eb120d5d0678.png\"\n", + ",SPRITE_DIR / \"Drum.sprite3\" / \"ce6971317035091341ec40571c9056e9.png\"\n", + ",SPRITE_DIR / \"Drums Conga.sprite3\" / \"2b2eacfce0fb1af023e6ca0f5ef6defe.png\"\n", + ",SPRITE_DIR / \"Drums Conga.sprite3\" / \"bdad2f140cfbd021f38241fc9acc7fd2.png\"\n", + ",SPRITE_DIR / \"Drums Tabla.sprite3\" / \"992d6359be830d977559dad91b04f698.png\"\n", + ",SPRITE_DIR / \"Drums Tabla.sprite3\" / \"af071d9d714c5c622e2bb07133698ce3.png\"\n", + ",SPRITE_DIR / \"Duck.sprite3\" / \"c9837d0454f5f0f73df290af2045359b.png\"\n", + ",SPRITE_DIR / \"Earth.sprite3\" / \"7405b5efa96995bae6853667f8cd145e.png\"\n", + ",SPRITE_DIR / \"Easel.sprite3\" / \"6a736beddc7844538be390c18b7c4361.png\"\n", + ",SPRITE_DIR / \"Easel.sprite3\" / \"a4b3714322c11b350f09a75921ae606b.png\"\n", + ",SPRITE_DIR / \"Easel.sprite3\" / \"caec09682a7fcdffef4647e8355ba004.png\"\n", + ",SPRITE_DIR / \"Egg.sprite3\" / \"0d127490af16f8a4ca5ce3212b2391c2.png\"\n", + ",SPRITE_DIR / \"Egg.sprite3\" / \"41535b4742f40e2630746b0c4bec98f2.png\"\n", + ",SPRITE_DIR / \"Egg.sprite3\" / \"b0b6e88ec64b842398200bab562b53e3.png\"\n", + ",SPRITE_DIR / \"Egg.sprite3\" / \"bb0505b802140a8cc200c9f8bfce4503.png\"\n", + ",SPRITE_DIR / \"Egg.sprite3\" / \"f8ee449298c1446cb0ef281923a4e57a.png\"\n", + ",SPRITE_DIR / \"Egg.sprite3\" / \"fbc629c3b062423e8c09cfacfb1e65f8.png\"\n", + ",SPRITE_DIR / \"Elephant.sprite3\" / \"2c9b5e0125d95b8bc511f6bb09b5ea2f.png\"\n", + ",SPRITE_DIR / \"Elephant.sprite3\" / \"b59873e9558c1c456200f50e5ab34770.png\"\n", + ",SPRITE_DIR / \"Elf.sprite3\" / \"524406c2b1fe253c1565ff516309817e.png\"\n", + ",SPRITE_DIR / \"Elf.sprite3\" / \"808c6fa2eb1cba0de1d17b18c6f41279.png\"\n", + ",SPRITE_DIR / \"Elf.sprite3\" / \"92ff640b911a8348d2734c0e38bba68c.png\"\n", + ",SPRITE_DIR / \"Elf.sprite3\" / \"e92abad171396a3198455df8557802e5.png\"\n", + ",SPRITE_DIR / \"Elf.sprite3\" / \"ec458328a85f89f06866e2337076ac0a.png\"\n", + ",SPRITE_DIR / \"Fairy.sprite3\" / \"40d726e17bfd2ffeb8c0aa5393ee1c77.png\"\n", + ",SPRITE_DIR / \"Fairy.sprite3\" / \"902350bba0d4b4612db1e2e902b6f201.png\"\n", + ",SPRITE_DIR / \"Fairy.sprite3\" / \"bea920473027f43e04c44e588c6cc39a.png\"\n", + ",SPRITE_DIR / \"Fairy.sprite3\" / \"d4f6163a1610243f55dd9cf1c9875c61.png\"\n", + ",SPRITE_DIR / \"Fairy.sprite3\" / \"decd31f829032b1d4dcf5efdbd362cb9.png\"\n", + ",SPRITE_DIR / \"Fish.sprite3\" / \"4a3478b3cdc3e8688a671be88c2775fd.png\"\n", + ",SPRITE_DIR / \"Fish.sprite3\" / \"7a0c31c0087f342867d4754f8dc57541.png\"\n", + ",SPRITE_DIR / \"Fish.sprite3\" / \"886e0bb732453eb8d3a849b4eab54943.png\"\n", + ",SPRITE_DIR / \"Fish.sprite3\" / \"a9b3d163756621f8395592ad77fb9369.png\"\n", + ",SPRITE_DIR / \"Fishbowl.sprite3\" / \"17c53cf0296f24722ba5b001d513e58f.png\"\n", + ",SPRITE_DIR / 
\"Fishbowl.sprite3\" / \"b3db01c5cda32fe3ea0b48dde5fa8130.png\"\n", + ",SPRITE_DIR / \"Food Truck.sprite3\" / \"a77f9693f87288d023a4632cf019776e.png\"\n", + ",SPRITE_DIR / \"Food Truck.sprite3\" / \"e850e3c93de767519f7f78b38f16ed1d.png\"\n", + ",SPRITE_DIR / \"Food Truck.sprite3\" / \"f4150de2297a63c3efd125c8e12dd7cc.png\"\n", + ",SPRITE_DIR / \"Football.sprite3\" / \"7ee31371b2eafba57cc5a78fc1a787fe.png\"\n", + ",SPRITE_DIR / \"Football.sprite3\" / \"c717def72c8bd98749284d31b51d7097.png\"\n", + ",SPRITE_DIR / \"Fortune Cookie.sprite3\" / \"c56dcaa1fa4e3c9740142b93d5982850.png\"\n", + ",SPRITE_DIR / \"Fox.sprite3\" / \"2c256eacbb753be361e8e52a0eefde77.png\"\n", + ",SPRITE_DIR / \"Fox.sprite3\" / \"9dd59a4514b5373d4f665db78e145636.png\"\n", + ",SPRITE_DIR / \"Fox.sprite3\" / \"dd398ed81edb60c91ad4805f4437d2fa.png\"\n", + ",SPRITE_DIR / \"Frank.sprite3\" / \"10d39bb7e31647a465e747cd243b8cd0.png\"\n", + ",SPRITE_DIR / \"Frank.sprite3\" / \"26da9617218493f4f42a1592f21afee8.png\"\n", + ",SPRITE_DIR / \"Frank.sprite3\" / \"d16b76a634f7367ce7d6112401a78e57.png\"\n", + ",SPRITE_DIR / \"Frank.sprite3\" / \"e56e930cc0229d1042a673e7503209c5.png\"\n", + ",SPRITE_DIR / \"Frog 2 .sprite3\" / \"0717f446c991aac7df2fe4d6590354e7.png\"\n", + ",SPRITE_DIR / \"Frog 2 .sprite3\" / \"d9f69469090784d8dd68d94c0fd78a50.png\"\n", + ",SPRITE_DIR / \"Frog 2 .sprite3\" / \"f2246c13e4540472c484119bc314d954.png\"\n", + ",SPRITE_DIR / \"Frog.sprite3\" / \"390845c11df0924f3b627bafeb3f814e.png\"\n", + ",SPRITE_DIR / \"Fruit Platter.sprite3\" / \"6c3252378da3334f63eebddbed3fae91.png\"\n", + ",SPRITE_DIR / \"Fruit Salad.sprite3\" / \"2e6ef315101433b78e38719e8cc630c2.png\"\n", + ",SPRITE_DIR / \"Ghost.sprite3\" / \"40ba3a0b5b3899a655fd8867229d4ee3.png\"\n", + ",SPRITE_DIR / \"Ghost.sprite3\" / \"634744e3f98bee53e9cb477a63aa9b21.png\"\n", + ",SPRITE_DIR / \"Ghost.sprite3\" / \"d1d89391f1d9c74557e504456d58a002.png\"\n", + ",SPRITE_DIR / \"Ghost.sprite3\" / \"f522b08c5757569ad289d67bce290cd0.png\"\n", + ",SPRITE_DIR / \"Gift.sprite3\" / \"0fdd104de718c5fc4a65da429468bdbd.png\"\n", + ",SPRITE_DIR / \"Gift.sprite3\" / \"6cbeda5d391c6d107f0b853222f344d9.png\"\n", + ",SPRITE_DIR / \"Giga Walking.sprite3\" / \"3afad833094d8dff1c4ff79edcaa13d0.png\"\n", + ",SPRITE_DIR / \"Giga Walking.sprite3\" / \"d27716e022fb5f747d7b09fe6eeeca06.png\"\n", + ",SPRITE_DIR / \"Giga Walking.sprite3\" / \"db55131bf54f96e8986d9b30730e42ce.png\"\n", + ",SPRITE_DIR / \"Giga.sprite3\" / \"337b338b2b10176221e638ac537854e6.png\"\n", + ",SPRITE_DIR / \"Giga.sprite3\" / \"92161a11e851ecda94cbbb985018fed6.png\"\n", + ",SPRITE_DIR / \"Giga.sprite3\" / \"bc706a7648342aaacac9050378b40c43.png\"\n", + ",SPRITE_DIR / \"Giga.sprite3\" / \"db15886cfdcb5e2f4459e9074e3990a1.png\"\n", + ",SPRITE_DIR / \"Giraffe.sprite3\" / \"43e89629fb9df7051eaf307c695424fc.png\"\n", + ",SPRITE_DIR / \"Giraffe.sprite3\" / \"cfd93a103479993aee4d680655e39d8d.png\"\n", + ",SPRITE_DIR / \"Giraffe.sprite3\" / \"ef1fca2ae13d49d9dd2c6cfc211a687c.png\"\n", + ",SPRITE_DIR / \"Glass Water.sprite3\" / \"ca70c69ef1f797d353581a3f76116ae3.png\"\n", + ",SPRITE_DIR / \"Glass Water.sprite3\" / \"cbf21cf1b057852f91135d27ebbf11ce.png\"\n", + ",SPRITE_DIR / \"Glasses.sprite3\" / \"705035328ac53d5ce1aa5a1ed1c2d172.png\"\n", + ",SPRITE_DIR / \"Glasses.sprite3\" / \"9e2f75d3a09f3f10d554ba8380c3ae52.png\"\n", + ",SPRITE_DIR / \"Glasses.sprite3\" / \"acd85b36e6b8d93ba4194ee2ea334207.png\"\n", + ",SPRITE_DIR / \"Glasses.sprite3\" / \"f2a02d0e7431147b8a4a282e02a8e6a4.png\"\n", + ",SPRITE_DIR / \"Glow-0.sprite3\" / 
\"64b59074f24d0e2405a509a45c0dadba.png\"\n", + ",SPRITE_DIR / \"Glow-1.sprite3\" / \"9f75c26aa6c56168a3e5a4f598de2c94.png\"\n", + ",SPRITE_DIR / \"Glow-2.sprite3\" / \"e8d8bf59db37b5012dd643a16a636042.png\"\n", + ",SPRITE_DIR / \"Glow-3.sprite3\" / \"57f7afe3b9888cca56803b73a62e4227.png\"\n", + ",SPRITE_DIR / \"Glow-4.sprite3\" / \"b8209e1980475b30ff11e60d7633446d.png\"\n", + ",SPRITE_DIR / \"Glow-5.sprite3\" / \"aacb5b3cec637f192f080138b4ccd8d2.png\"\n", + ",SPRITE_DIR / \"Glow-6.sprite3\" / \"84d9f26050c709e6b98706c22d2efb3d.png\"\n", + ",SPRITE_DIR / \"Glow-7.sprite3\" / \"6194b9a251a905d0001a969990961724.png\"\n", + ",SPRITE_DIR / \"Glow-8.sprite3\" / \"55e95fb9c60fbebb7d20bba99c7e9609.png\"\n", + ",SPRITE_DIR / \"Glow-9.sprite3\" / \"0f53ee6a988bda07cba561d38bfbc36f.png\"\n", + ",SPRITE_DIR / \"Glow-A.sprite3\" / \"fd470938cce54248aaf240b16e845456.png\"\n", + ",SPRITE_DIR / \"Glow-B.sprite3\" / \"a699fa024889b681d8b8b6c5c86acb6d.png\"\n", + ",SPRITE_DIR / \"Glow-C.sprite3\" / \"51b8a7dd7a8cddc5bc30e35824cc557a.png\"\n", + ",SPRITE_DIR / \"Glow-D.sprite3\" / \"a3a66e37de8d7ebe0505594e036ef6d1.png\"\n", + ",SPRITE_DIR / \"Glow-E.sprite3\" / \"80382a5db3fa556276068165c547b432.png\"\n", + ",SPRITE_DIR / \"Glow-F.sprite3\" / \"67239f7d47f7b92bc38e2d8b275d54ab.png\"\n", + ",SPRITE_DIR / \"Glow-G.sprite3\" / \"56839bc48957869d980c6f9b6f5a2a91.png\"\n", + ",SPRITE_DIR / \"Glow-H.sprite3\" / \"d6016c6494153cd5735ee4b6a1b05277.png\"\n", + ",SPRITE_DIR / \"Glow-I.sprite3\" / \"9077988af075c80cc403b1d6e5891528.png\"\n", + ",SPRITE_DIR / \"Glow-J.sprite3\" / \"6c359eff57abf5bb6db55894d08757c3.png\"\n", + ",SPRITE_DIR / \"Glow-K.sprite3\" / \"e932898d1e6fe3950a266fccaba0c3e6.png\"\n", + ",SPRITE_DIR / \"Glow-L.sprite3\" / \"dcee9202cf20e0395971f1ee73c45d37.png\"\n", + ",SPRITE_DIR / \"Glow-M.sprite3\" / \"26f81aa5990bf2371acaa8d76fe1e87f.png\"\n", + ",SPRITE_DIR / \"Glow-N.sprite3\" / \"d55a04ada14958eccc4aef446a4dad57.png\"\n", + ",SPRITE_DIR / \"Glow-O.sprite3\" / \"64b59074f24d0e2405a509a45c0dadba.png\"\n", + ",SPRITE_DIR / \"Glow-P.sprite3\" / \"c6edc2603ad4db3aa0b29f80e3e38cff.png\"\n", + ",SPRITE_DIR / \"Glow-Q.sprite3\" / \"e4ae18bf8b92ae375ce818d754588c76.png\"\n", + ",SPRITE_DIR / \"Glow-R.sprite3\" / \"bb11b49e19c68452331e78d51081ab42.png\"\n", + ",SPRITE_DIR / \"Glow-S.sprite3\" / \"6fd994b41bcf776fbf1f1521a879f1af.png\"\n", + ",SPRITE_DIR / \"Glow-T.sprite3\" / \"d687543649a676a14f408b5890d45f05.png\"\n", + ",SPRITE_DIR / \"Glow-U.sprite3\" / \"cb8ef2244400a57ba08e918cb4fe8bba.png\"\n", + ",SPRITE_DIR / \"Glow-V.sprite3\" / \"c6edc1ac2c5979f389598537cfb28096.png\"\n", + ",SPRITE_DIR / \"Glow-W.sprite3\" / \"2e0c2bb46c4ca3cf97779f749b1556f6.png\"\n", + ",SPRITE_DIR / \"Glow-X.sprite3\" / \"0b98a63dcc55251072a95a6c6bf7f6f2.png\"\n", + ",SPRITE_DIR / \"Glow-Y.sprite3\" / \"532494c9b5e6709f9982c00a48ce6870.png\"\n", + ",SPRITE_DIR / \"Glow-Z.sprite3\" / \"2d94d83dcc9ee3a107e5ea7ef0dddeb0.png\"\n", + ",SPRITE_DIR / \"Goalie.sprite3\" / \"59eedd0a23c3c983d386a0c125991c7f.png\"\n", + ",SPRITE_DIR / \"Goalie.sprite3\" / \"63f2955298d59dd22dc7b7c6a9c521e2.png\"\n", + ",SPRITE_DIR / \"Goalie.sprite3\" / \"a554f2a9b49a09ec67d1fd7ecfbcddcd.png\"\n", + ",SPRITE_DIR / \"Goalie.sprite3\" / \"eb096e2b4234f5f8ee1f2c44429eaa1a.png\"\n", + ",SPRITE_DIR / \"Goalie.sprite3\" / \"f2e7ba53f3a28c4359cb0d3e3cb4001a.png\"\n", + ",SPRITE_DIR / \"Goblin.sprite3\" / \"2add9ef4eaa25f8915406dcfd8bafc9f.png\"\n", + ",SPRITE_DIR / \"Goblin.sprite3\" / \"3f08380f25062b8055a1800f5dad14bd.png\"\n", + ",SPRITE_DIR / 
\"Goblin.sprite3\" / \"afb9fe328adae617ee3375366fca02e7.png\"\n", + ",SPRITE_DIR / \"Goblin.sprite3\" / \"b8604b8039d6b633015aaf17d74d5d5b.png\"\n", + ",SPRITE_DIR / \"Gobo.sprite3\" / \"5c0896569305ab177d87caa31aad2a72.png\"\n", + ",SPRITE_DIR / \"Gobo.sprite3\" / \"9d8021c216fb92cc708e1e96f3ed2b52.png\"\n", + ",SPRITE_DIR / \"Gobo.sprite3\" / \"f505a4e9eab5e40e2669a4462dba4c90.png\"\n", + ",SPRITE_DIR / \"Grasshopper.sprite3\" / \"529644c5ecdca63adafd87777e341ad7.png\"\n", + ",SPRITE_DIR / \"Grasshopper.sprite3\" / \"93550d8abde130ad149904c4448f8b65.png\"\n", + ",SPRITE_DIR / \"Grasshopper.sprite3\" / \"a7c638b8aa86f2a758830f8c2b0e4cf5.png\"\n", + ",SPRITE_DIR / \"Grasshopper.sprite3\" / \"cf2ac769df444137b4c1eec472fa4b92.png\"\n", + ",SPRITE_DIR / \"Grasshopper.sprite3\" / \"d4f3dfe69be6537e73544381408a820d.png\"\n", + ",SPRITE_DIR / \"Grasshopper.sprite3\" / \"e7210a370837dd1e4ebc1a56a973b7f6.png\"\n", + ",SPRITE_DIR / \"Green Flag.sprite3\" / \"2bbfd072183a67db5eddb923fe0726b3.png\"\n", + ",SPRITE_DIR / \"Griffin.sprite3\" / \"102f6200c13bd60afa9538c712776fb0.png\"\n", + ",SPRITE_DIR / \"Griffin.sprite3\" / \"157d3665cebcd41fa814b9217af99476.png\"\n", + ",SPRITE_DIR / \"Griffin.sprite3\" / \"a31166d45903206b52cb0f0a0cb687b5.png\"\n", + ",SPRITE_DIR / \"Griffin.sprite3\" / \"b8c8745820a341afec08e77f4a254551.png\"\n", + ",SPRITE_DIR / \"Guitar-electric1.sprite3\" / \"57c6d7dc148576cb2f36e53dea49260a.png\"\n", + ",SPRITE_DIR / \"Guitar-electric1.sprite3\" / \"677aed0b1168caf4b3ec565b9104dbe0.png\"\n", + ",SPRITE_DIR / \"Guitar-electric2.sprite3\" / \"83db2d0e342257e534ccdf0ec17bf668.png\"\n", + ",SPRITE_DIR / \"Guitar-electric2.sprite3\" / \"bb88e6a8a08a4034cc155b1137743ca1.png\"\n", + ",SPRITE_DIR / \"Guitar.sprite3\" / \"8704489dcf1a3ca93c5db40ebe5acd38.png\"\n", + ",SPRITE_DIR / \"Guitar.sprite3\" / \"e0423f4743f39456dade16fa1223d6b0.png\"\n", + ",SPRITE_DIR / \"Hannah.sprite3\" / \"5fdce07935156bbcf943793fa84e826c.png\"\n", + ",SPRITE_DIR / \"Hannah.sprite3\" / \"b983d99560313e38b4b3cd36cbd5f0d1.png\"\n", + ",SPRITE_DIR / \"Hannah.sprite3\" / \"d0c3b4b24fbf1152de3ebb68f6b875ae.png\"\n", + ",SPRITE_DIR / \"Hare.sprite3\" / \"7269593d83b6f9eae512997f541a7417.png\"\n", + ",SPRITE_DIR / \"Hare.sprite3\" / \"85a3b8c151e10576fa531a4293fdac00.png\"\n", + ",SPRITE_DIR / \"Hare.sprite3\" / \"c8dbb4302dd489a201938c203018c2f0.png\"\n", + ",SPRITE_DIR / \"Harper.sprite3\" / \"3a0973a042ee16e816c568651316d5d4.png\"\n", + ",SPRITE_DIR / \"Harper.sprite3\" / \"98ce6e6bb99f8ba116f127fdf2e739fd.png\"\n", + ",SPRITE_DIR / \"Harper.sprite3\" / \"e407fa0ed992393d12d0a108c11e2fa6.png\"\n", + ",SPRITE_DIR / \"Hat1 .sprite3\" / \"0aed53a86d92ec2283068000ac97a60b.png\"\n", + ",SPRITE_DIR / \"Hat1 .sprite3\" / \"13e382ae3f05a9a23e0b64ca23230438.png\"\n", + ",SPRITE_DIR / \"Hat1 .sprite3\" / \"6349e36da9897a2f89bdbf5c77dbdacb.png\"\n", + ",SPRITE_DIR / \"Hat1 .sprite3\" / \"c632719725400c604fcadf0858ce2b2c.png\"\n", + ",SPRITE_DIR / \"Hatchling.sprite3\" / \"0e5c295a043d5e183a98046e4f734b72.png\"\n", + ",SPRITE_DIR / \"Hatchling.sprite3\" / \"55f7d457eb0af78cb309ca47497c490f.png\"\n", + ",SPRITE_DIR / \"Hatchling.sprite3\" / \"f27d557be70a9522fae4392bfd4f5249.png\"\n", + ",SPRITE_DIR / \"Heart Candy.sprite3\" / \"288976865e8c5db717d859e915606d82.png\"\n", + ",SPRITE_DIR / \"Heart Candy.sprite3\" / \"3ee430ba825f41ae9913453d4932fb8b.png\"\n", + ",SPRITE_DIR / \"Heart Candy.sprite3\" / \"51248e76be2aa7a0f0ed77bc94af1b3a.png\"\n", + ",SPRITE_DIR / \"Heart Candy.sprite3\" / 
\"5fa8c4693cf8cba8cdbcbed72f4f58aa.png\"\n", + ",SPRITE_DIR / \"Heart Face.sprite3\" / \"989770846f8cd1628b48bbe91d0a7d0d.png\"\n", + ",SPRITE_DIR / \"Heart.sprite3\" / \"c77e640f6e023e7ce1e376da0f26e1eb.png\"\n", + ",SPRITE_DIR / \"Heart.sprite3\" / \"e24731f5cf2759c2f289921bebb86ea2.png\"\n", + ",SPRITE_DIR / \"Hedgehog.sprite3\" / \"1fcbba4a2252e96c52d2d8aa8e593e51.png\"\n", + ",SPRITE_DIR / \"Hedgehog.sprite3\" / \"3251533232e7f44315512149c7f76214.png\"\n", + ",SPRITE_DIR / \"Hedgehog.sprite3\" / \"3b0e1717859808cecf1a45e2a32dc201.png\"\n", + ",SPRITE_DIR / \"Hedgehog.sprite3\" / \"42bac40ca828133600e0a9f7ba019adb.png\"\n", + ",SPRITE_DIR / \"Hedgehog.sprite3\" / \"93c2d7a0abefaf26ee50d5038ac5bf61.png\"\n", + ",SPRITE_DIR / \"Hen.sprite3\" / \"6c9e05f568862dbcea0a1652a210239b.png\"\n", + ",SPRITE_DIR / \"Hen.sprite3\" / \"b02a33e32313cc9a75781a6fafd07033.png\"\n", + ",SPRITE_DIR / \"Hen.sprite3\" / \"c9a4570a2d0ae09b9feeeb5607e4b9c7.png\"\n", + ",SPRITE_DIR / \"Hen.sprite3\" / \"d055896a473bb12f4ec67af1fdb9c652.png\"\n", + ",SPRITE_DIR / \"Hippo1.sprite3\" / \"5764a2c650f225bc27cc0e6c5db401ea.png\"\n", + ",SPRITE_DIR / \"Hippo1.sprite3\" / \"911901dc568b56c15fe81819bc2af653.png\"\n", + ",SPRITE_DIR / \"Home Button.sprite3\" / \"1ebdcb9f033fa6658259b52da376b7ac.png\"\n", + ",SPRITE_DIR / \"Horse.sprite3\" / \"0e0fa871bea01c2dfb70e9955dc098be.png\"\n", + ",SPRITE_DIR / \"Horse.sprite3\" / \"ad458251c5bf5b375870829f1762fa47.png\"\n", + ",SPRITE_DIR / \"Jaime.sprite3\" / \"3ddc912edef87ae29121f57294fa0cb5.png\"\n", + ",SPRITE_DIR / \"Jaime.sprite3\" / \"4b9d2162e30dbb924840575ed35fddb0.png\"\n", + ",SPRITE_DIR / \"Jaime.sprite3\" / \"5883bdefba451aaeac8d77c798d41eb0.png\"\n", + ",SPRITE_DIR / \"Jaime.sprite3\" / \"5a683f4536abca0f83a77bc341df4c9a.png\"\n", + ",SPRITE_DIR / \"Jaime.sprite3\" / \"63e56d28cc3e3d9b735e1f1d51248cc0.png\"\n", + ",SPRITE_DIR / \"Jaime.sprite3\" / \"7fb579a98d6db257f1b16109d3c4609a.png\"\n", + ",SPRITE_DIR / \"Jaime.sprite3\" / \"d6cc9814f7a6640e4c2b1a4276987dc5.png\"\n", + ",SPRITE_DIR / \"Jamal.sprite3\" / \"2408318e743873c7254db1623441b9c5.png\"\n", + ",SPRITE_DIR / \"Jamal.sprite3\" / \"3c8d5e688450ad1e6bf024a32c55bcda.png\"\n", + ",SPRITE_DIR / \"Jamal.sprite3\" / \"693748d763c8da4b119a5e4bee6a1768.png\"\n", + ",SPRITE_DIR / \"Jamal.sprite3\" / \"92692e0c0f376797274392484ba74133.png\"\n", + ",SPRITE_DIR / \"Jar.sprite3\" / \"33b537168f3c2eb3dafeb739c22f38a6.png\"\n", + ",SPRITE_DIR / \"Jar.sprite3\" / \"e0f5ac773987470ff2467e3e01b9ab23.png\"\n", + ",SPRITE_DIR / \"Jellyfish.sprite3\" / \"00c99df84f8385038461d6c42a5465ab.png\"\n", + ",SPRITE_DIR / \"Jellyfish.sprite3\" / \"3158299771b3d34ed2c50a00fbab715e.png\"\n", + ",SPRITE_DIR / \"Jellyfish.sprite3\" / \"4e259b7c08f05145fc7800b33e4f356e.png\"\n", + ",SPRITE_DIR / \"Jellyfish.sprite3\" / \"5944a1e687fa31589517825b2144a17b.png\"\n", + ",SPRITE_DIR / \"Jordyn.sprite3\" / \"00c8c464c19460df693f8d5ae69afdab.png\"\n", + ",SPRITE_DIR / \"Jordyn.sprite3\" / \"768c4601174f0dfcb96b3080ccc3a192.png\"\n", + ",SPRITE_DIR / \"Jordyn.sprite3\" / \"a7cc1e5f02b58ecc8095cfc18eef0289.png\"\n", + ",SPRITE_DIR / \"Jordyn.sprite3\" / \"db4d97cbf24e2b8af665bfbf06f67fa0.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"01dd2f553c7262329ebaba2516e3a2b1.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"0ed4a09c41871d150c51119c1bceded2.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"2e2a6534d33883fdd2f8471a1adbebb7.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"3d3ea804243800981acabc7caba10939.png\"\n", + 
",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"6f68790ee3eb9bdccf8749305186b0dd.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"75ee2383fd83992b401c8a0730521d94.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"a12f40b18067bb31746f9cf461de88aa.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"a55fbb529c10f70bcb374aef8a63571b.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"a9fbc01a4124d555da12630312e46197.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"aabfedff0d11243386b6b0941e0f72e9.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"c2d5519e8a0f2214ff757117038c28dc.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"ea812b4c2b2405aa2b73158023298f71.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"efaa8eb6c8cf7dc35d4d37d546ebd333.png\"\n", + ",SPRITE_DIR / \"Kai.sprite3\" / \"6e007fde15e49c66ee7996561f80b452.png\"\n", + ",SPRITE_DIR / \"Kai.sprite3\" / \"c1e1149f6d7e308e3e4eba14ccc8a751.png\"\n", + ",SPRITE_DIR / \"Key.sprite3\" / \"680d3e4dce002f922b32447fcf29743d.png\"\n", + ",SPRITE_DIR / \"Keyboard.sprite3\" / \"0ad880b5e829578832c8927b3f6ef7f8.png\"\n", + ",SPRITE_DIR / \"Keyboard.sprite3\" / \"6efd23c91dab070526feacdf72e2d3da.png\"\n", + ",SPRITE_DIR / \"Kia.sprite3\" / \"b3d0a248adbc26b0d0826e042a81670a.png\"\n", + ",SPRITE_DIR / \"Kia.sprite3\" / \"db6cd6b145bb6d8dc299475af7423d6e.png\"\n", + ",SPRITE_DIR / \"Kia.sprite3\" / \"e56e480c994572323d88355b8733e1a3.png\"\n", + ",SPRITE_DIR / \"Kiran.sprite3\" / \"2928e9fbd5ca08e326192b3a41bea691.png\"\n", + ",SPRITE_DIR / \"Kiran.sprite3\" / \"78bd6de23d4929aef678ddf0f3f5c276.png\"\n", + ",SPRITE_DIR / \"Kiran.sprite3\" / \"7912b6f378bd781f62683e003c574dbe.png\"\n", + ",SPRITE_DIR / \"Kiran.sprite3\" / \"7c0bedab5404830a5147cc4a2d46e997.png\"\n", + ",SPRITE_DIR / \"Kiran.sprite3\" / \"7f0bc123819fc2666321b6cd38069bdb.png\"\n", + ",SPRITE_DIR / \"Kiran.sprite3\" / \"b0566e0eed7b5216b92d61468d21ecee.png\"\n", + ",SPRITE_DIR / \"Knight.sprite3\" / \"188325c56b79ff3cd58497c970ba87a6.png\"\n", + ",SPRITE_DIR / \"Ladybug1.sprite3\" / \"169c0efa8c094fdedddf8c19c36f0229.png\"\n", + ",SPRITE_DIR / \"Ladybug2.sprite3\" / \"3f48228829b77fc47d6d89b5729b2957.png\"\n", + ",SPRITE_DIR / \"Ladybug2.sprite3\" / \"457200f8dec8fea00d22473e9bd9175e.png\"\n", + ",SPRITE_DIR / \"Laptop.sprite3\" / \"cd2d1f72275e676df5f82be74ae91dfa.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"0725440743391e7c622bb5df6a94e1d4.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"0a2461b3b9a4b8603e75565d78b1d4d7.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"4423159d81378ada5ffd7f053d7ef471.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"525285312925e1e6b4e237a119b61305.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"563f86443cb102b9241cebb62eb2d81a.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"5f176ef763be18f7c342dc2e2de7bf16.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"63d099e94aa8a973dcfa4c5d8b4a3e7a.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"645d6e2674452009df7a9a844a604791.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"71dde8c43985815bffb5a5ed5632af58.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"79ca528d13ffb557a236f0a35a0eb486.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"b508808c087adb55ce156f5cfbdac61b.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"cdd52259075b75628001672d375e4985.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"e68d899e178309ff3eae3e1de8a8ec28.png\"\n", + ",SPRITE_DIR / \"Lightning.sprite3\" / \"0ddd3a05a330925bcd2d048908ed40b8.png\"\n", + 
",SPRITE_DIR / \"Line.sprite3\" / \"e85305b47cfd92d971704dcb7ad6e17b.png\"\n", + ",SPRITE_DIR / \"Lion.sprite3\" / \"91c64c5361d906fd36d5813ae27b85a8.png\"\n", + ",SPRITE_DIR / \"Lion.sprite3\" / \"e88e83c8b3ca80c54540b5f0c5a0cc03.png\"\n", + ",SPRITE_DIR / \"Lion.sprite3\" / \"f0d9ab3d82bbade6e279dc1c81e2e6db.png\"\n", + ",SPRITE_DIR / \"Llama.sprite3\" / \"1f3aaeb598e121ad817143800d8c4a32.png\"\n", + ",SPRITE_DIR / \"Llama.sprite3\" / \"ac80d75745315f052f7f7b4e62e4a850.png\"\n", + ",SPRITE_DIR / \"Llama.sprite3\" / \"c97824f20a45adfa3ff362f82247a025.png\"\n", + ",SPRITE_DIR / \"Luca.sprite3\" / \"18dfad514602a4907502c7c84861b24e.png\"\n", + ",SPRITE_DIR / \"Luca.sprite3\" / \"90fa2ad340edc6e6ba963710feef940e.png\"\n", + ",SPRITE_DIR / \"Luca.sprite3\" / \"963cb82687acaf5de53a22b287192723.png\"\n", + ",SPRITE_DIR / \"Magic Wand.sprite3\" / \"89aa5332042d7bbf8368293a4efeafa4.png\"\n", + ",SPRITE_DIR / \"Marian.sprite3\" / \"16893c6136292ae36e13dc72cc55719b.png\"\n", + ",SPRITE_DIR / \"Marian.sprite3\" / \"221e9999b20ecc21b37c68fcdf09ab02.png\"\n", + ",SPRITE_DIR / \"Marian.sprite3\" / \"3d2ecee35eab8c37d1c3eadfe50ce447.png\"\n", + ",SPRITE_DIR / \"Marian.sprite3\" / \"64206b46c411e40926569cf3f5e587be.png\"\n", + ",SPRITE_DIR / \"Marian.sprite3\" / \"e9577a1eb098905dd386135bb38c0398.png\"\n", + ",SPRITE_DIR / \"Max.sprite3\" / \"5180649cfd62831c52f8994ce644d6ac.png\"\n", + ",SPRITE_DIR / \"Max.sprite3\" / \"6b91183a4ad162e4950d95828a85144d.png\"\n", + ",SPRITE_DIR / \"Max.sprite3\" / \"7b3d1324382032f87384ef2c8c618156.png\"\n", + ",SPRITE_DIR / \"Max.sprite3\" / \"9669ce16eb6c6df6f26686598a59711d.png\"\n", + ",SPRITE_DIR / \"Mermaid.sprite3\" / \"2a6274017350fab67ebec9157420ae96.png\"\n", + ",SPRITE_DIR / \"Mermaid.sprite3\" / \"65419296861b1c7ee59075af0f949d67.png\"\n", + ",SPRITE_DIR / \"Mermaid.sprite3\" / \"88a3b6b2f0b3ffa25cab97bc619f8386.png\"\n", + ",SPRITE_DIR / \"Mermaid.sprite3\" / \"f903049308e2171178d889f5c4a7d466.png\"\n", + ",SPRITE_DIR / \"Microphone.sprite3\" / \"c96578ffb9e314fee097862d69fde0af.png\"\n", + ",SPRITE_DIR / \"Microphone.sprite3\" / \"d4d80e94e2cc759b8ca1d7b58f2a9052.png\"\n", + ",SPRITE_DIR / \"Milk.sprite3\" / \"0f683f65c737bbcbb916df0895d8436e.png\"\n", + ",SPRITE_DIR / \"Milk.sprite3\" / \"1fa49d62f8028a375470e7bac451e666.png\"\n", + ",SPRITE_DIR / \"Milk.sprite3\" / \"4d3eabd3ef848b61c3120d796c274733.png\"\n", + ",SPRITE_DIR / \"Milk.sprite3\" / \"6ec300ae45758eff12e9d47cf4f0d2a0.png\"\n", + ",SPRITE_DIR / \"Milk.sprite3\" / \"aa5f1501805aa68d3ad74623f59e6135.png\"\n", + ",SPRITE_DIR / \"Monet.sprite3\" / \"137bbc522701a96908667d1b1730d041.png\"\n", + ",SPRITE_DIR / \"Monet.sprite3\" / \"138e6591f3317222521963ef3ce9a057.png\"\n", + ",SPRITE_DIR / \"Monet.sprite3\" / \"4c6b016c55c4348b6dce29ba99e7ede4.png\"\n", + ",SPRITE_DIR / \"Monet.sprite3\" / \"5b67cb843dcc9dabdc580b9e35e95659.png\"\n", + ",SPRITE_DIR / \"Monet.sprite3\" / \"740276a8aa9ddd12dd4b30f369975d66.png\"\n", + ",SPRITE_DIR / \"Monkey.sprite3\" / \"254926ee81bfa82f2db7009a80635061.png\"\n", + ",SPRITE_DIR / \"Monkey.sprite3\" / \"de0405b0576ade1282bdfcd198922baa.png\"\n", + ",SPRITE_DIR / \"Monkey.sprite3\" / \"ec6d62f0ff64bb5440ffdc662b6e46fa.png\"\n", + ",SPRITE_DIR / \"Motorcycle.sprite3\" / \"6e960b3c6a60ebe192e36b235c50ae03.png\"\n", + ",SPRITE_DIR / \"Motorcycle.sprite3\" / \"a70bdd403ace1f1ece2f2af0fbc3c720.png\"\n", + ",SPRITE_DIR / \"Motorcycle.sprite3\" / \"b73447c2577b8f77b5e2eb1da6d6445a.png\"\n", + ",SPRITE_DIR / \"Motorcycle.sprite3\" / \"c6f8179ff3e8f8ab08b01d50343eefc4.png\"\n", 
+ ",SPRITE_DIR / \"Mouse1.sprite3\" / \"8a7da35c473972f88896ca73b7df2188.png\"\n", + ",SPRITE_DIR / \"Mouse1.sprite3\" / \"c5f76b65e30075c12d49ea8a8f7d6bad.png\"\n", + ",SPRITE_DIR / \"Muffin.sprite3\" / \"afa34381db44e699d61f774911aab448.png\"\n", + ",SPRITE_DIR / \"Muffin.sprite3\" / \"bd0581902cd6cc13888520776bf1620c.png\"\n", + ",SPRITE_DIR / \"Nano.sprite3\" / \"8f2f4a70e87262ef478ce60567b6208a.png\"\n", + ",SPRITE_DIR / \"Nano.sprite3\" / \"a4e2034751fa650fd5fd69432c110104.png\"\n", + ",SPRITE_DIR / \"Nano.sprite3\" / \"a62e560863c0e49b12e5d57e13d084f1.png\"\n", + ",SPRITE_DIR / \"Nano.sprite3\" / \"d12aead3e3c2917e7eba8b2b90a7afd2.png\"\n", + ",SPRITE_DIR / \"Neigh Pony.sprite3\" / \"592816f56409d582603c485cbefcbbb8.png\"\n", + ",SPRITE_DIR / \"Noor.sprite3\" / \"4cf233c6540e434aded60608ba316ce3.png\"\n", + ",SPRITE_DIR / \"Noor.sprite3\" / \"975585ca9461f0730a285fc96df73425.png\"\n", + ",SPRITE_DIR / \"Noor.sprite3\" / \"c1792bbd5970034b4595ff7e742d6e47.png\"\n", + ",SPRITE_DIR / \"Octopus.sprite3\" / \"5d6e17d6260134d0402ba487a419d7c3.png\"\n", + ",SPRITE_DIR / \"Octopus.sprite3\" / \"7d33a531087188b29deae879f23f76bc.png\"\n", + ",SPRITE_DIR / \"Octopus.sprite3\" / \"9b5a2cd287229bf36ffcc176ed72cc0c.png\"\n", + ",SPRITE_DIR / \"Octopus.sprite3\" / \"e22d9b633feffc1d026980a1f21e07d7.png\"\n", + ",SPRITE_DIR / \"Octopus.sprite3\" / \"f582f162c4438d82c9e2a0a87a3e02ce.png\"\n", + ",SPRITE_DIR / \"Orange.sprite3\" / \"d0a55aae1decb57152b454c9a5226757.png\"\n", + ",SPRITE_DIR / \"Orange2.sprite3\" / \"27286ca08451bc512e1d611965dad061.png\"\n", + ",SPRITE_DIR / \"Orange2.sprite3\" / \"b823f73a31e61fd362574e2c24dfc0c2.png\"\n", + ",SPRITE_DIR / \"Outfielder.sprite3\" / \"10578b06f97b9fdc34f622e9e682c144.png\"\n", + ",SPRITE_DIR / \"Outfielder.sprite3\" / \"175ddc7ed99cc5b72909098046d8f558.png\"\n", + ",SPRITE_DIR / \"Outfielder.sprite3\" / \"9f31c772f88a5f32fe857d57b3bcb04c.png\"\n", + ",SPRITE_DIR / \"Outfielder.sprite3\" / \"d0a8837867d39444a824b734d4cd5554.png\"\n", + ",SPRITE_DIR / \"Owl.sprite3\" / \"236bb6b33e7db00834bcea89b03b8a5e.png\"\n", + ",SPRITE_DIR / \"Owl.sprite3\" / \"806139207066cb5eaef727d54c1bb4ec.png\"\n", + ",SPRITE_DIR / \"Owl.sprite3\" / \"a518f70b65ec489e709795209b43207a.png\"\n", + ",SPRITE_DIR / \"Paddle.sprite3\" / \"15864fac7d38bb94c1ec3a199de96c26.png\"\n", + ",SPRITE_DIR / \"Panther.sprite3\" / \"0e7c244f54b27058f8b17d9e0d3cee12.png\"\n", + ",SPRITE_DIR / \"Panther.sprite3\" / \"4a762fd04901407544d8858adac2b3fa.png\"\n", + ",SPRITE_DIR / \"Panther.sprite3\" / \"a7aee991f51636574625c1300f035bdd.png\"\n", + ",SPRITE_DIR / \"Pants.sprite3\" / \"ac9c7259873e472c2c1a99339c694f16.png\"\n", + ",SPRITE_DIR / \"Pants.sprite3\" / \"ef8b1576f183222a4c2d373a7bc194cc.png\"\n", + ",SPRITE_DIR / \"Parrot.sprite3\" / \"036fad20b674197358f8c0b2dc64e17e.png\"\n", + ",SPRITE_DIR / \"Parrot.sprite3\" / \"082f371c206f07d20e53595a9c69cc22.png\"\n", + ",SPRITE_DIR / \"Party Hats.sprite3\" / \"1d14be44e4aa99a471115cd874204690.png\"\n", + ",SPRITE_DIR / \"Party Hats.sprite3\" / \"8b43413906cf1ba1343580d3ca062048.png\"\n", + ",SPRITE_DIR / \"Party Hats.sprite3\" / \"abefb98344ece228afeb462f46d6b750.png\"\n", + ",SPRITE_DIR / \"Pencil.sprite3\" / \"b3d6eae85f285dd618bf9dcf609b9454.png\"\n", + ",SPRITE_DIR / \"Pencil.sprite3\" / \"f017876452a24d118fc0b1753caefad9.png\"\n", + ",SPRITE_DIR / \"Penguin 2.sprite3\" / \"280d2aa13f0c6774cc8828dc177aaf60.png\"\n", + ",SPRITE_DIR / \"Penguin 2.sprite3\" / \"428772307d90f4b347d6cc3c0d8e76ef.png\"\n", + ",SPRITE_DIR / \"Penguin 2.sprite3\" / 
\"780467f3d173dcb37fd65834841babc6.png\"\n", + ",SPRITE_DIR / \"Penguin 2.sprite3\" / \"d485f5620d2dde69a6aa1cda7c897d12.png\"\n", + ",SPRITE_DIR / \"Penguin.sprite3\" / \"6d11aedea7f316215aaa0d08617f4c31.png\"\n", + ",SPRITE_DIR / \"Penguin.sprite3\" / \"c434b674f2da18ba13cdfe51dbc05ecc.png\"\n", + ",SPRITE_DIR / \"Penguin.sprite3\" / \"dad5b0d82cb6e053d1ded2ef537a9453.png\"\n", + ",SPRITE_DIR / \"Pico Walking.sprite3\" / \"22fb16ae7cc18187a7adaf2852f07884.png\"\n", + ",SPRITE_DIR / \"Pico Walking.sprite3\" / \"52a60eccb624530fd3a24fc41fbad6e5.png\"\n", + ",SPRITE_DIR / \"Pico Walking.sprite3\" / \"702bd644d01ea8eda2ea122daeea7d74.png\"\n", + ",SPRITE_DIR / \"Pico Walking.sprite3\" / \"c8f58f31cabf4acabb3f828730061276.png\"\n", + ",SPRITE_DIR / \"Pico.sprite3\" / \"a7597b1f0c13455d335a3d4fe77da528.png\"\n", + ",SPRITE_DIR / \"Pico.sprite3\" / \"bcc0e8a5dda3a813608902b887c87bb4.png\"\n", + ",SPRITE_DIR / \"Pico.sprite3\" / \"d6dfa2efe58939af4c85755feb3c0375.png\"\n", + ",SPRITE_DIR / \"Pico.sprite3\" / \"e7ce31db37f7abd2901499db2e9ad83a.png\"\n", + ",SPRITE_DIR / \"Pitcher.sprite3\" / \"049132404cb2cb157830aaf18aee6a24.png\"\n", + ",SPRITE_DIR / \"Pitcher.sprite3\" / \"ae8aa57ce6e5729d30d8b785bec97774.png\"\n", + ",SPRITE_DIR / \"Pitcher.sprite3\" / \"bceae719ba1ec230afec56f14a1e4d52.png\"\n", + ",SPRITE_DIR / \"Pitcher.sprite3\" / \"fc955dec7f1e97f1ddd9f8245a80907e.png\"\n", + ",SPRITE_DIR / \"Planet2.sprite3\" / \"50cde8a4a737da0eba1ab73eb263f836.png\"\n", + ",SPRITE_DIR / \"Polar Bear.sprite3\" / \"11d00a06abd2c882672464f4867e90b6.png\"\n", + ",SPRITE_DIR / \"Polar Bear.sprite3\" / \"5d7cd81aad80100368b8b77bf09ad576.png\"\n", + ",SPRITE_DIR / \"Polar Bear.sprite3\" / \"d050a3394b61ade080f7963c40192e7d.png\"\n", + ",SPRITE_DIR / \"Potion.sprite3\" / \"0eceab4561534dde827bf68233f47441.png\"\n", + ",SPRITE_DIR / \"Potion.sprite3\" / \"d922ffdfe38fd30fd8787810c6bce318.png\"\n", + ",SPRITE_DIR / \"Potion.sprite3\" / \"f8500e9530bf1136c6386f2a329519dd.png\"\n", + ",SPRITE_DIR / \"Prince.sprite3\" / \"ada9c5ce11245c467c780bceb665c42d.png\"\n", + ",SPRITE_DIR / \"Princess.sprite3\" / \"0721f5238a2bcde49d05f72ca9d21d9b.png\"\n", + ",SPRITE_DIR / \"Princess.sprite3\" / \"23330150c0a09180083b597cbfeca99a.png\"\n", + ",SPRITE_DIR / \"Princess.sprite3\" / \"39157d5d3280ab0b273260170d5436c2.png\"\n", + ",SPRITE_DIR / \"Princess.sprite3\" / \"ba37f578cc6cabce6fe4d2864c9eb96f.png\"\n", + ",SPRITE_DIR / \"Princess.sprite3\" / \"e59f55c86ea557bdbd88302012ce8db5.png\"\n", + ",SPRITE_DIR / \"Pufferfish.sprite3\" / \"1b4f39763c9848cc840522b95cc6d8ae.png\"\n", + ",SPRITE_DIR / \"Pufferfish.sprite3\" / \"2266c6bb2c3a8fb80783518a08852b4a.png\"\n", + ",SPRITE_DIR / \"Pufferfish.sprite3\" / \"b8aa1bd46eacc054c695b89167c3ad28.png\"\n", + ",SPRITE_DIR / \"Pufferfish.sprite3\" / \"e73e71718306f6c7085305dba142c315.png\"\n", + ",SPRITE_DIR / \"Puppy.sprite3\" / \"05630bfa94501a3e5d61ce443a0cea70.png\"\n", + ",SPRITE_DIR / \"Puppy.sprite3\" / \"2768d9e44a0aab055856d301bbc2b04e.png\"\n", + ",SPRITE_DIR / \"Puppy.sprite3\" / \"c4aeb5c39b39ef57a3f18ace54cf7db1.png\"\n", + ",SPRITE_DIR / \"Puppy.sprite3\" / \"c7817052ed9e78057f877d0d56b5c6a6.png\"\n", + ",SPRITE_DIR / \"Rabbit.sprite3\" / \"137976ec71439e2f986caeaa70e4c932.png\"\n", + ",SPRITE_DIR / \"Rabbit.sprite3\" / \"1ca3f829a2c9f7fa4d1df295fe5f787c.png\"\n", + ",SPRITE_DIR / \"Rabbit.sprite3\" / \"49169d752f20d27fb71022b16044d759.png\"\n", + ",SPRITE_DIR / \"Rabbit.sprite3\" / \"90677c6f16380ef077d6115f6a6371ff.png\"\n", + ",SPRITE_DIR / \"Rabbit.sprite3\" / 
\"970f886bfa454e1daa6d6c30ef49a972.png\"\n", + ",SPRITE_DIR / \"Radio.sprite3\" / \"828f0762d028605f6fe52f9287555b74.png\"\n", + ",SPRITE_DIR / \"Radio.sprite3\" / \"e96676f038fc523b40392dc1676552dc.png\"\n", + ",SPRITE_DIR / \"Rainbow.sprite3\" / \"033979eba12e4572b2520bd93a87583e.png\"\n", + ",SPRITE_DIR / \"Referee.sprite3\" / \"1cd641a48499db84636d983916b62a83.png\"\n", + ",SPRITE_DIR / \"Referee.sprite3\" / \"46dde2baba61a7e48463ae8e58441470.png\"\n", + ",SPRITE_DIR / \"Referee.sprite3\" / \"5948c4160089fcc0975a867221ff2256.png\"\n", + ",SPRITE_DIR / \"Referee.sprite3\" / \"7eeca5313c2e7d455482badff3079f64.png\"\n", + ",SPRITE_DIR / \"Reindeer.sprite3\" / \"60993a025167e7886736109dca5d55e2.png\"\n", + ",SPRITE_DIR / \"Retro Robot.sprite3\" / \"35070c1078c4eec153ea2769516c922c.png\"\n", + ",SPRITE_DIR / \"Retro Robot.sprite3\" / \"53398a713b144ecda6ec32fb4a8d28e1.png\"\n", + ",SPRITE_DIR / \"Retro Robot.sprite3\" / \"d139f89665962dcaab4cb2b246359ba1.png\"\n", + ",SPRITE_DIR / \"Ripley.sprite3\" / \"043373c51689f3df8bf50eb12c4e3d39.png\"\n", + ",SPRITE_DIR / \"Ripley.sprite3\" / \"3ab169f52ea3783270d28ef035a5a7c5.png\"\n", + ",SPRITE_DIR / \"Ripley.sprite3\" / \"8e173178d886d1cb272877e8923d651b.png\"\n", + ",SPRITE_DIR / \"Ripley.sprite3\" / \"90feaffe3d0c4d31287d57bd1bc64afa.png\"\n", + ",SPRITE_DIR / \"Ripley.sprite3\" / \"e751d0a781694897f75046eb2810e9a5.png\"\n", + ",SPRITE_DIR / \"Ripley.sprite3\" / \"f798adaf44e8891c5e2f1b2a82a613b2.png\"\n", + ",SPRITE_DIR / \"Robot.sprite3\" / \"10060b3b58c77345cfe92288a46e5c20.png\"\n", + ",SPRITE_DIR / \"Robot.sprite3\" / \"36d1098b880dbe47e58d93e7b2842381.png\"\n", + ",SPRITE_DIR / \"Robot.sprite3\" / \"4f5441207afc9bc075b0b404dbba8b59.png\"\n", + ",SPRITE_DIR / \"Robot.sprite3\" / \"89679608327ad572b93225d06fe9edda.png\"\n", + ",SPRITE_DIR / \"Rocketship.sprite3\" / \"10f83786e5ee34f40ee43b49bba89ee2.png\"\n", + ",SPRITE_DIR / \"Rocketship.sprite3\" / \"49ee475c516a444d8a512724063b8b98.png\"\n", + ",SPRITE_DIR / \"Rocketship.sprite3\" / \"525c06ceb3a351244bcd810c9ba951c7.png\"\n", + ",SPRITE_DIR / \"Rocketship.sprite3\" / \"5682c68af2cc8aea791f0373e9ed03d8.png\"\n", + ",SPRITE_DIR / \"Rocketship.sprite3\" / \"a6ff2f1344a18cc0a4bcc945e00afaf4.png\"\n", + ",SPRITE_DIR / \"Rocks.sprite3\" / \"55426ccbb5c49b1526e53586943f3ec3.png\"\n", + ",SPRITE_DIR / \"Rooster.sprite3\" / \"0ae345deb1c81ec7f4f4644c26ac85fa.png\"\n", + ",SPRITE_DIR / \"Rooster.sprite3\" / \"6490360bd5d6efd2b646fb24c19df6b1.png\"\n", + ",SPRITE_DIR / \"Rooster.sprite3\" / \"bd5f701c99aa6512bac7b87c51e7cd46.png\"\n", + ",SPRITE_DIR / \"Ruby.sprite3\" / \"c30210e8f719c3a4d2c7cc6917a39300.png\"\n", + ",SPRITE_DIR / \"Ruby.sprite3\" / \"fc15fdbcc535473f6140cab28197f3be.png\"\n", + ",SPRITE_DIR / \"Sailboat.sprite3\" / \"ca241a938a2c44a0de6b91230012ff39.png\"\n", + ",SPRITE_DIR / \"Sam.sprite3\" / \"8208e99159b36c957fb9fbc187e51bc7.png\"\n", + ",SPRITE_DIR / \"Sasha.sprite3\" / \"89bb25e1465eb9481d267e4f9df592af.png\"\n", + ",SPRITE_DIR / \"Sasha.sprite3\" / \"a0b8890ce458aebed5e7002e1897508e.png\"\n", + ",SPRITE_DIR / \"Sasha.sprite3\" / \"e26bf53469cafd730ca150e745ceeafc.png\"\n", + ",SPRITE_DIR / \"Saxophone.sprite3\" / \"4414c51bdd03f60f40a1210e1d55cf57.png\"\n", + ",SPRITE_DIR / \"Saxophone.sprite3\" / \"459a64bebb7a788395c70e5369ab4746.png\"\n", + ",SPRITE_DIR / \"Scarf.sprite3\" / \"05b06ab8d2c6e2110896d70bb60a9fd7.png\"\n", + ",SPRITE_DIR / \"Scarf.sprite3\" / \"213db212d5d0c602f85cb248719ce785.png\"\n", + ",SPRITE_DIR / \"Scarf.sprite3\" / 
\"4a85e4e6232f12abf9802bec4aa419b3.png\"\n", + ",SPRITE_DIR / \"Shark 2.sprite3\" / \"6182a0628eadf2d16624864bea964432.png\"\n", + ",SPRITE_DIR / \"Shark 2.sprite3\" / \"7f4440b268358417aa79ccef06877c57.png\"\n", + ",SPRITE_DIR / \"Shark 2.sprite3\" / \"8a8d551e951087050cfa88fc64f9b4db.png\"\n", + ",SPRITE_DIR / \"Shark.sprite3\" / \"6c8008ae677ec51af8da5023fa2cd521.png\"\n", + ",SPRITE_DIR / \"Shark.sprite3\" / \"b769db8fcbbf2609f0552db62ec1f94a.png\"\n", + ",SPRITE_DIR / \"Shirt.sprite3\" / \"43e916bbe0ba7cecd08407d25ac3d104.png\"\n", + ",SPRITE_DIR / \"Shoes.sprite3\" / \"1e813a1618f38212a6febaa7e6b8d712.png\"\n", + ",SPRITE_DIR / \"Shoes.sprite3\" / \"71b5a444d482455e9956cfd52d20526a.png\"\n", + ",SPRITE_DIR / \"Shoes.sprite3\" / \"724d9a8984279949ce452fc9b2e437a6.png\"\n", + ",SPRITE_DIR / \"Shoes.sprite3\" / \"f89f1656251248f1591aa67ae946c047.png\"\n", + ",SPRITE_DIR / \"Shorts.sprite3\" / \"4d5f7a13ed20dc4f8fd194a7eb3f625f.png\"\n", + ",SPRITE_DIR / \"Shorts.sprite3\" / \"d5fc56b7247f079e5821d74d3e91e7a6.png\"\n", + ",SPRITE_DIR / \"Shorts.sprite3\" / \"ea78ad682811f9c42731ec648ec7af3c.png\"\n", + ",SPRITE_DIR / \"Singer1.sprite3\" / \"d6ff94dc7e24200c28015ee5d6373140.png\"\n", + ",SPRITE_DIR / \"Skeleton.sprite3\" / \"3cfff37072a4138b977ba406c290b419.png\"\n", + ",SPRITE_DIR / \"Skeleton.sprite3\" / \"67108e6b1d0f41aba2f94f81114ebf59.png\"\n", + ",SPRITE_DIR / \"Skeleton.sprite3\" / \"c4d755c672a0826caa7b6fb767cc3f9b.png\"\n", + ",SPRITE_DIR / \"Skeleton.sprite3\" / \"f4a00b2bd214b1d8412a2e89b2030354.png\"\n", + ",SPRITE_DIR / \"Snake.sprite3\" / \"42519e0ee19d75def88a514d3c49ce37.png\"\n", + ",SPRITE_DIR / \"Snake.sprite3\" / \"a0acb49efdf60b20cea0833eeedd44a1.png\"\n", + ",SPRITE_DIR / \"Snake.sprite3\" / \"f0e6ebdbdc8571b42f8a48cc2aed3042.png\"\n", + ",SPRITE_DIR / \"Snowflake.sprite3\" / \"083735cc9cd0e6d8c3dbab5ab9ee5407.png\"\n", + ",SPRITE_DIR / \"Snowman.sprite3\" / \"0f109df620f935b94cb154101e6586d4.png\"\n", + ",SPRITE_DIR / \"Soccer Ball.sprite3\" / \"5d973d7a3a8be3f3bd6e1cd0f73c32b5.png\"\n", + ",SPRITE_DIR / \"Speaker.sprite3\" / \"697f6becae5321f77990636564ef0c97.png\"\n", + ",SPRITE_DIR / \"Squirrel.sprite3\" / \"b86efb7f23387300cf9037a61f328ab9.png\"\n", + ",SPRITE_DIR / \"Star.sprite3\" / \"551629f2a64c1f3703e57aaa133effa6.png\"\n", + ",SPRITE_DIR / \"Starfish.sprite3\" / \"69dca6e42d45d3fef89f81de40b11bef.png\"\n", + ",SPRITE_DIR / \"Starfish.sprite3\" / \"be2ca55a5688670302e7c3f79d5040d1.png\"\n", + ",SPRITE_DIR / \"Stop.sprite3\" / \"1e2c3987e4cdb1f317b1773662719b13.png\"\n", + ",SPRITE_DIR / \"Story-A.sprite3\" / \"3c46f5192d2c29f957381e0100c6085d.png\"\n", + ",SPRITE_DIR / \"Story-A.sprite3\" / \"4b1beecd9a8892df0918242b2b5fbd4c.png\"\n", + ",SPRITE_DIR / \"Story-A.sprite3\" / \"7a6fdf5e26fc690879f8e215bfdec4d5.png\"\n", + ",SPRITE_DIR / \"Story-B.sprite3\" / \"22817ed2e4253787c78d7b696bbefdc1.png\"\n", + ",SPRITE_DIR / \"Story-B.sprite3\" / \"5f8301434ce176ab328f5b658ee1ec05.png\"\n", + ",SPRITE_DIR / \"Story-B.sprite3\" / \"a09376e1eacf17be3c9fbd268674b9f7.png\"\n", + ",SPRITE_DIR / \"Story-C.sprite3\" / \"5e61610cbba50ba86f18830f61bbaecb.png\"\n", + ",SPRITE_DIR / \"Story-C.sprite3\" / \"6bd5cb8bc3e4df5e055f4c56dd630855.png\"\n", + ",SPRITE_DIR / \"Story-C.sprite3\" / \"f6ff602902affbae2f89b389f08df432.png\"\n", + ",SPRITE_DIR / \"Story-D.sprite3\" / \"130cc4b9ad8dd8936d22c51c05ac6860.png\"\n", + ",SPRITE_DIR / \"Story-D.sprite3\" / \"b28d76f648ad24932a18cb40c8d76bc5.png\"\n", + ",SPRITE_DIR / \"Story-D.sprite3\" / 
\"dd713e3bf42d7a4fd8d2f12094db1c63.png\"\n", + ",SPRITE_DIR / \"Story-E.sprite3\" / \"3005df22798da45f1daf1de7421bb91d.png\"\n", + ",SPRITE_DIR / \"Story-E.sprite3\" / \"4e903ac41a7e16a52efff8477f2398c7.png\"\n", + ",SPRITE_DIR / \"Story-E.sprite3\" / \"add5c5a8eec67eb010b5cbd44dea5c8d.png\"\n", + ",SPRITE_DIR / \"Story-F.sprite3\" / \"4a3ae31dd3dd3b96239a0307cfdaa1b6.png\"\n", + ",SPRITE_DIR / \"Story-F.sprite3\" / \"83565581ecc9f7d4010efd8683a99393.png\"\n", + ",SPRITE_DIR / \"Story-F.sprite3\" / \"d4ec9a1827429f4e2f3dc239dcc15b95.png\"\n", + ",SPRITE_DIR / \"Story-G.sprite3\" / \"648cfdd48a7f748e6198194669ba1909.png\"\n", + ",SPRITE_DIR / \"Story-G.sprite3\" / \"85144902cc61fe98dca513b74276d7d8.png\"\n", + ",SPRITE_DIR / \"Story-G.sprite3\" / \"8fb61932544adbe8c95b067ad1351758.png\"\n", + ",SPRITE_DIR / \"Story-H.sprite3\" / \"70520daa9f82a2347c8a8fa9e7fe1a6e.png\"\n", + ",SPRITE_DIR / \"Story-H.sprite3\" / \"99aae97a2b49904db7eeb813fa968582.png\"\n", + ",SPRITE_DIR / \"Story-H.sprite3\" / \"eec286b1cfea3f219a5b486931abedd2.png\"\n", + ",SPRITE_DIR / \"Story-I.sprite3\" / \"1bceea90292a51a7177abf581f28bf2c.png\"\n", + ",SPRITE_DIR / \"Story-I.sprite3\" / \"2c156e20da1ad4e8e397a89ad8fb1c26.png\"\n", + ",SPRITE_DIR / \"Story-I.sprite3\" / \"9cad752323aa81dfa8d8cf009057b108.png\"\n", + ",SPRITE_DIR / \"Story-J.sprite3\" / \"2838de5d131785c985eb0eab25ec63af.png\"\n", + ",SPRITE_DIR / \"Story-J.sprite3\" / \"7d7d6f257a6bf3668a0befa4199f16a0.png\"\n", + ",SPRITE_DIR / \"Story-J.sprite3\" / \"d5b58ddd6f6b4fdcfdfd86d102853935.png\"\n", + ",SPRITE_DIR / \"Story-K.sprite3\" / \"0cb908dbc38635cc595e6060afc1b682.png\"\n", + ",SPRITE_DIR / \"Story-K.sprite3\" / \"17ef8f63a2a8f47258bd62cf642fd8d6.png\"\n", + ",SPRITE_DIR / \"Story-K.sprite3\" / \"ecf86afea23fd95e27d4e63659adbfa6.png\"\n", + ",SPRITE_DIR / \"Story-L.sprite3\" / \"0fc3ac08468935694255ef8a461d4d26.png\"\n", + ",SPRITE_DIR / \"Story-L.sprite3\" / \"935c7cf21c35523c0a232013a6399a49.png\"\n", + ",SPRITE_DIR / \"Story-L.sprite3\" / \"ec4d85a60c32c7637de31dbf503266a0.png\"\n", + ",SPRITE_DIR / \"Story-M.sprite3\" / \"42e5468fa164e001925d5a49d372f4b1.png\"\n", + ",SPRITE_DIR / \"Story-M.sprite3\" / \"643896fcad0a1bf6eb9f3f590094687c.png\"\n", + ",SPRITE_DIR / \"Story-M.sprite3\" / \"9bf9e677da34528433d3c1acb945e2df.png\"\n", + ",SPRITE_DIR / \"Story-N.sprite3\" / \"40ffad793f4042a5fe7b3aaa6bc175ae.png\"\n", + ",SPRITE_DIR / \"Story-N.sprite3\" / \"80c8f32282b697097933837905a6f257.png\"\n", + ",SPRITE_DIR / \"Story-N.sprite3\" / \"c2f77473dd16d1a3713218b05390a688.png\"\n", + ",SPRITE_DIR / \"Story-O.sprite3\" / \"0bdd31ea2b3b78d0c39022795a49c69a.png\"\n", + ",SPRITE_DIR / \"Story-O.sprite3\" / \"40bf3880b678beeda8cf708a51a4402d.png\"\n", + ",SPRITE_DIR / \"Story-O.sprite3\" / \"43a89fc1442627ca48b1dc631c517942.png\"\n", + ",SPRITE_DIR / \"Story-P.sprite3\" / \"1a41f74cd76d7202d8b22ffc7729e03f.png\"\n", + ",SPRITE_DIR / \"Story-P.sprite3\" / \"377eac55366670a03c469705c6689f09.png\"\n", + ",SPRITE_DIR / \"Story-P.sprite3\" / \"9cf707e83af27c47e74adb77496ffca5.png\"\n", + ",SPRITE_DIR / \"Story-Q.sprite3\" / \"01acd1076994a4379a3fc9e034bc05fc.png\"\n", + ",SPRITE_DIR / \"Story-Q.sprite3\" / \"84a6dc992bce018a1eac9be0173ad917.png\"\n", + ",SPRITE_DIR / \"Story-Q.sprite3\" / \"efc27a91c30d6a511be4245e36684192.png\"\n", + ",SPRITE_DIR / \"Story-R.sprite3\" / \"3c3f44aba3eff8856472e06b333a7201.png\"\n", + ",SPRITE_DIR / \"Story-R.sprite3\" / \"4f217b14a161fcd9590614b0733100ea.png\"\n", + ",SPRITE_DIR / \"Story-R.sprite3\" / 
\"5c1d38d02ae9c4df7851a6e9d52f25b4.png\"\n", + ",SPRITE_DIR / \"Story-S.sprite3\" / \"47b9f910048ce4db93bdfbcd2638e19a.png\"\n", + ",SPRITE_DIR / \"Story-S.sprite3\" / \"5a113fcacd35ababbf23c5a9289433d1.png\"\n", + ",SPRITE_DIR / \"Story-S.sprite3\" / \"fd2a94481c3ef0c223784b2f3c6df874.png\"\n", + ",SPRITE_DIR / \"Story-T.sprite3\" / \"001a2186db228fdd9bfbf3f15800bb63.png\"\n", + ",SPRITE_DIR / \"Story-T.sprite3\" / \"66b22b0ff0a5c1c205a701316ab954cf.png\"\n", + ",SPRITE_DIR / \"Story-T.sprite3\" / \"b61e1ac30aa2f35d4fd8c23fab1f76ea.png\"\n", + ",SPRITE_DIR / \"Story-U.sprite3\" / \"51dd73c840ba3aca0f9770e13cb14fb3.png\"\n", + ",SPRITE_DIR / \"Story-U.sprite3\" / \"cfb334b977b8f2a39aa56b1e0532829e.png\"\n", + ",SPRITE_DIR / \"Story-U.sprite3\" / \"f6b7b4da5362fdac29d84f1fbf19e3f4.png\"\n", + ",SPRITE_DIR / \"Story-V.sprite3\" / \"43a8993221848f90e9f37664e7832b4a.png\"\n", + ",SPRITE_DIR / \"Story-V.sprite3\" / \"d5c20886e3eb0ca0f5430c9482b1d832.png\"\n", + ",SPRITE_DIR / \"Story-V.sprite3\" / \"f27e7a4216665a6eab43fe9b4b5ec934.png\"\n", + ",SPRITE_DIR / \"Story-W.sprite3\" / \"396e27d20d1a49edaa106ba6d667cedd.png\"\n", + ",SPRITE_DIR / \"Story-W.sprite3\" / \"528df57da4490f6da8c75da06a1367f5.png\"\n", + ",SPRITE_DIR / \"Story-W.sprite3\" / \"f21ba826cd88c376e868f079d6df273c.png\"\n", + ",SPRITE_DIR / \"Story-X.sprite3\" / \"04be1176e562eff16f1159f69945a82e.png\"\n", + ",SPRITE_DIR / \"Story-X.sprite3\" / \"ca4e3e84788bdeea42dd5ed952d5a66c.png\"\n", + ",SPRITE_DIR / \"Story-X.sprite3\" / \"db0c1a6499169aac6639a1a0076658ce.png\"\n", + ",SPRITE_DIR / \"Story-Y.sprite3\" / \"093a9410933f7d01f459f08bcb01735b.png\"\n", + ",SPRITE_DIR / \"Story-Y.sprite3\" / \"59275f907633ce02074f787e5767bfde.png\"\n", + ",SPRITE_DIR / \"Story-Y.sprite3\" / \"d7fabe2652c93dd1bf91d9064cf5a348.png\"\n", + ",SPRITE_DIR / \"Story-Z.sprite3\" / \"23c24dbee23b1545afa8ee15ed339327.png\"\n", + ",SPRITE_DIR / \"Story-Z.sprite3\" / \"34825a171f7b35962484fa53e99ff632.png\"\n", + ",SPRITE_DIR / \"Story-Z.sprite3\" / \"665db4c356d7e010fa8d71cc291834e3.png\"\n", + ",SPRITE_DIR / \"Strawberry.sprite3\" / \"10ed1486ff4bab3eebb3b8ae55d81ccd.png\"\n", + ",SPRITE_DIR / \"Strawberry.sprite3\" / \"2fa57942dc7ded7eddc4d41554768d67.png\"\n", + ",SPRITE_DIR / \"Strawberry.sprite3\" / \"662279c12965d2913a060a55aebec496.png\"\n", + ",SPRITE_DIR / \"Strawberry.sprite3\" / \"aa4eae20c750900e4f63e6ede4083d81.png\"\n", + ",SPRITE_DIR / \"Strawberry.sprite3\" / \"f5008785e74590689afca4b578d108a4.png\"\n", + ",SPRITE_DIR / \"Sun.sprite3\" / \"406808d86aff20a15d592b308e166a32.png\"\n", + ",SPRITE_DIR / \"Sunglasses1.sprite3\" / \"c95a05c3bed665027d267d93454c428a.png\"\n", + ",SPRITE_DIR / \"Sunglasses1.sprite3\" / \"dc568ae1f8b9b6544f0634ef975a7098.png\"\n", + ",SPRITE_DIR / \"Taco.sprite3\" / \"383ea1ef802bc2706670536cfa8271b7.png\"\n", + ",SPRITE_DIR / \"Taco.sprite3\" / \"c97113d17afeaac9f461ea0ec257ef26.png\"\n", + ",SPRITE_DIR / \"Takeout.sprite3\" / \"24cc271fd6cf55f25b71e78faf749a98.png\"\n", + ",SPRITE_DIR / \"Takeout.sprite3\" / \"2b32d6a4a724c38bfaeb494d30827f19.png\"\n", + ",SPRITE_DIR / \"Takeout.sprite3\" / \"40f63eb18230c4defa9051830beffb0f.png\"\n", + ",SPRITE_DIR / \"Takeout.sprite3\" / \"9202a59888545c56c864bacb700c4297.png\"\n", + ",SPRITE_DIR / \"Takeout.sprite3\" / \"e03cd6e668e0eeddb2da98a095e2f30f.png\"\n", + ",SPRITE_DIR / \"Tatiana.sprite3\" / \"5cf65a9f942ca92c93915527ff9db1e6.png\"\n", + ",SPRITE_DIR / \"Tatiana.sprite3\" / \"91fb7d056beaf553ccec03d61d72c545.png\"\n", + ",SPRITE_DIR / \"Tatiana.sprite3\" / 
\"e207fd3f99e1db8c5d66f49446f27e37.png\"\n", + ",SPRITE_DIR / \"Tatiana.sprite3\" / \"e2ea6bbc6066574d4836e808a1c5f849.png\"\n", + ",SPRITE_DIR / \"Taylor.sprite3\" / \"a504d785629f2d1ca6b87e80b334d5e8.png\"\n", + ",SPRITE_DIR / \"Taylor.sprite3\" / \"ae2eaae0882543dc276c8e7d56ff2e7b.png\"\n", + ",SPRITE_DIR / \"Taylor.sprite3\" / \"e0082f49fc5d0d83d7fad6124ba82bb1.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"279bd5499329f98a68cf92c68014e198.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"377b8521c436f4f39ed2100fa1cb7c2f.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"3c9a7eac1d696ae74ee40c6efa8fa4dd.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"548bdf23904e409c1fcc0992f44d0b4c.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"580fba92f23d5592200eb5a9079dc38f.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"8313a2229d555bbdb8ce92dffed067ad.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"86602007ae2952236d47d7fd587a56b6.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"b2f75ac1cd84615efaea6a7d7a4ee205.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"ce2141ce97921ddc333bc65ff5bec27d.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"e06ac61e96e3a5abf4ca0863816f5d28.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"e51942bb4651e616549cfce1ad36ff83.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"f60f99278455c843b7833fb7615428dd.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"fea7045c09073700b88fae8d4d257cd1.png\"\n", + ",SPRITE_DIR / \"Tennis Ball.sprite3\" / \"34fa36004be0340ec845ba6bbeb5e5d5.png\"\n", + ",SPRITE_DIR / \"Tera.sprite3\" / \"18f9a11ecdbd3ad8719beb176c484d41.png\"\n", + ",SPRITE_DIR / \"Tera.sprite3\" / \"2daca5f43efc2d29fb089879448142e9.png\"\n", + ",SPRITE_DIR / \"Tera.sprite3\" / \"365d4de6c99d71f1370f7c5e636728af.png\"\n", + ",SPRITE_DIR / \"Tera.sprite3\" / \"5456a723f3b35eaa946b974a59888793.png\"\n", + ",SPRITE_DIR / \"Toucan.sprite3\" / \"72952d831d0b67c9d056b44a4bc3d0ae.png\"\n", + ",SPRITE_DIR / \"Toucan.sprite3\" / \"9eef2e49b3bbf371603ae783cd82db3c.png\"\n", + ",SPRITE_DIR / \"Toucan.sprite3\" / \"b6345d7386021ee85bb17f8aa4950eed.png\"\n", + ",SPRITE_DIR / \"Trampoline.sprite3\" / \"8fa3c6fcff2f25f5fe7842d68dcfe5cf.png\"\n", + ",SPRITE_DIR / \"Tree1.sprite3\" / \"d04b15886635101db8220a4361c0c88d.png\"\n", + ",SPRITE_DIR / \"Trees.sprite3\" / \"04758bd432a8b1cab527bddf14432147.png\"\n", + ",SPRITE_DIR / \"Trees.sprite3\" / \"551b3fae8eab06b49013f54009a7767a.png\"\n", + ",SPRITE_DIR / \"Trisha.sprite3\" / \"2d06023ec09ec312ab49055530511134.png\"\n", + ",SPRITE_DIR / \"Trisha.sprite3\" / \"55d31103bc86447c6a727b4f0664a5ea.png\"\n", + ",SPRITE_DIR / \"Trisha.sprite3\" / \"c31dc8487a841f644889784ff437e2c5.png\"\n", + ",SPRITE_DIR / \"Truck.sprite3\" / \"63b00424bdabc3459e5bc554c6c21e06.png\"\n", + ",SPRITE_DIR / \"Truck.sprite3\" / \"aaa05abc5aa182a0d7bfdc6db0f3207a.png\"\n", + ",SPRITE_DIR / \"Truck.sprite3\" / \"ce077e6db3573062017f94c2e4a8caea.png\"\n", + ",SPRITE_DIR / \"Trumpet.sprite3\" / \"47a1ec267505be96b678df30b92ec534.png\"\n", + ",SPRITE_DIR / \"Trumpet.sprite3\" / \"9a5c211622d6d2fed600c1809fccd21d.png\"\n", + ",SPRITE_DIR / \"Unicorn 2.sprite3\" / \"dcbeac8e856c9ddd6c457376be6573c8.png\"\n", + ",SPRITE_DIR / \"Unicorn Running.sprite3\" / \"1fb3d038e985c01899881bc5bb373c16.png\"\n", + ",SPRITE_DIR / \"Unicorn Running.sprite3\" / \"4709966d11b37e8a11d24c800e8b2859.png\"\n", + ",SPRITE_DIR / \"Unicorn Running.sprite3\" / \"8feaeec435125227c675dd95f69ff835.png\"\n", + 
",SPRITE_DIR / \"Unicorn Running.sprite3\" / \"e111350b8bedefffee0d5e7e2490d446.png\"\n", + ",SPRITE_DIR / \"Unicorn Running.sprite3\" / \"f00efa25fc97f2cce2499771d6a5f809.png\"\n", + ",SPRITE_DIR / \"Unicorn Running.sprite3\" / \"fa5fe4596494a43db8c7957d2254aee3.png\"\n", + ",SPRITE_DIR / \"Unicorn.sprite3\" / \"1439d51d9878276362b123c9045af6b5.png\"\n", + ",SPRITE_DIR / \"Wand.sprite3\" / \"c021f0c7e3086a11336421dd864b7812.png\"\n", + ",SPRITE_DIR / \"Wanda.sprite3\" / \"0b008dabac95126132ab4e0c56d25400.png\"\n", + ",SPRITE_DIR / \"Watermelon.sprite3\" / \"1ed1c8b78eae2ee7422074d7f883031d.png\"\n", + ",SPRITE_DIR / \"Watermelon.sprite3\" / \"21d1340478e32a942914a7afd12b9f1a.png\"\n", + ",SPRITE_DIR / \"Watermelon.sprite3\" / \"677738282686d2dcce35d731c3ddc043.png\"\n", + ",SPRITE_DIR / \"Winter Hat.sprite3\" / \"2672323e34d6dc82fda8fc3b057fa5aa.png\"\n", + ",SPRITE_DIR / \"Witch.sprite3\" / \"44cbaf358d2d8e66815e447c25a4b72e.png\"\n", + ",SPRITE_DIR / \"Witch.sprite3\" / \"668c9dc76ba6a07bebabf5aed4623566.png\"\n", + ",SPRITE_DIR / \"Witch.sprite3\" / \"a7e48fc790511fbd46b30b1cdcdc98fc.png\"\n", + ",SPRITE_DIR / \"Witch.sprite3\" / \"b10fb75f426397e10c878fda19d92009.png\"\n", + ",SPRITE_DIR / \"Wizard Girl.sprite3\" / \"4be145d338d921b2d9d6dfd10cda4a6c.png\"\n", + ",SPRITE_DIR / \"Wizard Hat.sprite3\" / \"398e447e36465c2521fdb3a6917b0c65.png\"\n", + ",SPRITE_DIR / \"Wizard-toad.sprite3\" / \"4041d5a2d1869e81268b9b92b49013a3.png\"\n", + ",SPRITE_DIR / \"Wizard-toad.sprite3\" / \"ca3bb4d397ecf6cda3edc48340af908b.png\"\n", + ",SPRITE_DIR / \"Wizard.sprite3\" / \"55ba51188af86ca16ef30267e874c1ed.png\"\n", + ",SPRITE_DIR / \"Wizard.sprite3\" / \"91d495085eb4d02a375c42f6318071e7.png\"\n", + ",SPRITE_DIR / \"Wizard.sprite3\" / \"df943c9894ee4b9df8c5893ce30c2a5f.png\"\n", + ",SPRITE_DIR / \"Zebra.sprite3\" / \"0e3bc5073305b7079b5e9a8c7b7d7f9b.png\"\n", + ",SPRITE_DIR / \"Zebra.sprite3\" / \"f3e322a25b9f79801066056de6f33fb1.png\"\n", + ",BACKDROP_DIR / \"Arctic.sb3\" / \"67e0db3305b3c8bac3a363b1c428892e.png\"\n", + ",BACKDROP_DIR / \"Arctic.sb3\" / \"8eb8790be5507fdccf73e7c1570bbbab.png\"\n", + ",BACKDROP_DIR / \"Baseball 1.sb3\" / \"825d9b54682c406215d9d1f98a819449.png\"\n", + ",BACKDROP_DIR / \"Baseball 2.sb3\" / \"7be1f5b3e682813dac1f297e52ff7dca.png\"\n", + ",BACKDROP_DIR / \"Basketball 1.sb3\" / \"ae21eac3d1814aee1d37ae82ea287816.png\"\n", + ",BACKDROP_DIR / \"Basketball 2.sb3\" / \"a5865738283613a2725b2c9dda6d8c78.png\"\n", + ",BACKDROP_DIR / \"Beach Malibu.sb3\" / \"050615fe992a00d6af0e664e497ebf53.png\"\n", + ",BACKDROP_DIR / \"Beach Rio.sb3\" / \"968f0ede6e70e1dbb763d6fd4c5003e0.png\"\n", + ",BACKDROP_DIR / \"Bedroom 1.sb3\" / \"7aa6bbb2ddc4c10f901e1a50aeac1c7e.png\"\n", + ",BACKDROP_DIR / \"Bedroom 2.sb3\" / \"e2f8b0dbd0a65d2ad8bfc21616662a6a.png\"\n", + ",BACKDROP_DIR / \"Bedroom 3.sb3\" / \"8cc0b88d53345b3e337e8f028a32a4e7.png\"\n", + ",BACKDROP_DIR / \"Bench With View.sb3\" / \"962201a2b712a302fb087f8f0dcb2076.png\"\n", + ",BACKDROP_DIR / \"Blue Sky 2.sb3\" / \"8eb8790be5507fdccf73e7c1570bbbab.png\"\n", + ",BACKDROP_DIR / \"Blue Sky.sb3\" / \"e7c147730f19d284bcd7b3f00af19bb6.png\"\n", + ",BACKDROP_DIR / \"Boardwalk.sb3\" / \"de0e54cd11551566f044e7e6bc588b2c.png\"\n", + ",BACKDROP_DIR / \"Canyon.sb3\" / \"c7c0b27b959193a0b570a9639cfe8158.png\"\n", + ",BACKDROP_DIR / \"Castle 1.sb3\" / \"e1914ed7917267f1c2ef2b48004cade9.png\"\n", + ",BACKDROP_DIR / \"Castle 2.sb3\" / \"951765ee7f7370f120c9df20b577c22f.png\"\n", + ",BACKDROP_DIR / \"Castle 3.sb3\" / 
\"76fa99f67569fcd39b4be74ed38c33f3.png\"\n", + ",BACKDROP_DIR / \"Castle 4.sb3\" / \"4f45f79af8e8dac3d41eb5a06ade61d4.png\"\n", + ",BACKDROP_DIR / \"Chalkboard.sb3\" / \"a8a24b5aa717bbef09dbe31368914427.png\"\n", + ",BACKDROP_DIR / \"Circles.sb3\" / \"c9847be305920807c5597d81576dd0c4.png\"\n", + ",BACKDROP_DIR / \"City With Water.sb3\" / \"1ef98019fc94ea65a1b55d5521285c7a.png\"\n", + ",BACKDROP_DIR / \"Colorful City.sb3\" / \"04d18ddd1b85f0ea30beb14b8da49f60.png\"\n", + ",BACKDROP_DIR / \"Concert.sb3\" / \"c8d90320d2966c08af8cdd1c6a7a93b5.png\"\n", + ",BACKDROP_DIR / \"Desert.sb3\" / \"d98a9526a34890cf4bad11b5409eae2a.png\"\n", + ",BACKDROP_DIR / \"Farm.sb3\" / \"1e8a70bd07f1dcba3383883f3b948266.png\"\n", + ",BACKDROP_DIR / \"Field At Mit.sb3\" / \"5b0a970202b464915915260c03f05455.png\"\n", + ",BACKDROP_DIR / \"Flowers.sb3\" / \"25a6ede51a96d4e55de2ffb81ae96f8c.png\"\n", + ",BACKDROP_DIR / \"Forest.sb3\" / \"92968ac16b2f0c3f7835a6dacd172c7b.png\"\n", + ",BACKDROP_DIR / \"Galaxy.sb3\" / \"5fab1922f254ae9fd150162c3e392bef.png\"\n", + ",BACKDROP_DIR / \"Garden-rock.sb3\" / \"4f66053598bea0905e1559ab9d5a6e31.png\"\n", + ",BACKDROP_DIR / \"Greek Theater.sb3\" / \"93d71e8b8a96cc007b8d68f36acd338a.png\"\n", + ",BACKDROP_DIR / \"Hall.sb3\" / \"ea86ca30b346f27ca5faf1254f6a31e3.png\"\n", + ",BACKDROP_DIR / \"Hay Field.sb3\" / \"da102a69d135973e0fc139131dec785a.png\"\n", + ",BACKDROP_DIR / \"Hearts.sb3\" / \"f98526ccb0eec3ac7d6c8f8ab502825e.png\"\n", + ",BACKDROP_DIR / \"Hill.sb3\" / \"2129c842f28d6881f622fdc3497ff2da.png\"\n", + ",BACKDROP_DIR / \"Jungle.sb3\" / \"f4f908da19e2753f3ed679d7b37650ca.png\"\n", + ",BACKDROP_DIR / \"Jurassic.sb3\" / \"64025bdca5db4938f65597e3682fddcf.png\"\n", + ",BACKDROP_DIR / \"Light.sb3\" / \"4b98c07876ed8997c3762e75790507b4.png\"\n", + ",BACKDROP_DIR / \"Metro.sb3\" / \"0b4a15ba028bf205ec051390d6ac4de7.png\"\n", + ",BACKDROP_DIR / \"Moon.sb3\" / \"0b1d2eaf22d62ef88de80ccde5578fba.png\"\n", + ",BACKDROP_DIR / \"Mountain.sb3\" / \"f84989feee2cf462a1c597169777ee3c.png\"\n", + ",BACKDROP_DIR / \"Mural.sb3\" / \"efb625f7e0b199b15f69e116cd053cea.png\"\n", + ",BACKDROP_DIR / \"Nebula.sb3\" / \"9b5cdbd596da1b6149f56b794b6394f4.png\"\n", + ",BACKDROP_DIR / \"Neon Tunnel.sb3\" / \"57d2b13b2f73d3d878c72810c137b0d6.png\"\n", + ",BACKDROP_DIR / \"Night City With Street.sb3\" / \"14443ad7907b6479d7562a12b8ae0efb.png\"\n", + ",BACKDROP_DIR / \"Night City.sb3\" / \"6fdc795ff487204f72740567be5f64f9.png\"\n", + ",BACKDROP_DIR / \"Party.sb3\" / \"108160d0e44d1c340182e31c9dc0758a.png\"\n", + ",BACKDROP_DIR / \"Pathway.sb3\" / \"5d747ec036755a4b129f0d5b978bc61c.png\"\n", + ",BACKDROP_DIR / \"Playground.sb3\" / \"e5f794c8756ca0cead5cb7e7fe354c41.png\"\n", + ",BACKDROP_DIR / \"Playing Field.sb3\" / \"2de108f3098e92f5c5976cf75d38e99d.png\"\n", + ",BACKDROP_DIR / \"Pool.sb3\" / \"6cab934df643d2fc508cfa90c0c4059b.png\"\n", + ",BACKDROP_DIR / \"Rays.sb3\" / \"87e963282db9e020e8c4d075891ea12b.png\"\n", + ",BACKDROP_DIR / \"Refrigerator.sb3\" / \"98f053f9681e872f34fafd783ce72205.png\"\n", + ",BACKDROP_DIR / \"Room 1.sb3\" / \"87ec29ad216c0074c731d581c7f40c39.png\"\n", + ",BACKDROP_DIR / \"Room 1.sb3\" / \"a81668321aa3dcc0fc185d3e36ae76f6.png\"\n", + ",BACKDROP_DIR / \"Room 2.sb3\" / \"05ae3e3bbea890a6e3552ffe8456775e.png\"\n", + ",BACKDROP_DIR / \"Savanna.sb3\" / \"9b020b8c7cb6a9592f7303add9441d8f.png\"\n", + ",BACKDROP_DIR / \"School.sb3\" / \"1dea69ac0f62cf538d368a7bde1372ac.png\"\n", + ",BACKDROP_DIR / \"Slopes.sb3\" / \"63b6a69594a0a87888b56244bfa2ac1b.png\"\n", + ",BACKDROP_DIR / \"Soccer 
2.sb3\" / \"b0dc1268cb595aaeef405bce40d1639c.png\"\n", + ",BACKDROP_DIR / \"Soccer.sb3\" / \"04a63154f04b09494354090f7cc2f1b9.png\"\n", + ",BACKDROP_DIR / \"Space City 1.sb3\" / \"20344b0edcc498281e4cb80242a72667.png\"\n", + ",BACKDROP_DIR / \"Space City 2.sb3\" / \"32b2316fd375faa18088f6c57ebb1c8d.png\"\n", + ",BACKDROP_DIR / \"Space.sb3\" / \"84208d9a3718ec3c9fc5a32a792fa1d0.png\"\n", + ",BACKDROP_DIR / \"Spaceship.sb3\" / \"0c450891306fa63ef02aa0fda7fd0ef9.png\"\n", + ",BACKDROP_DIR / \"Spotlight.sb3\" / \"d26bf4c3980163d9106625cc2ea6c50d.png\"\n", + ",BACKDROP_DIR / \"Stars.sb3\" / \"47282ff0f7047c6fab9c94b531abf721.png\"\n", + ",BACKDROP_DIR / \"Stripes.sb3\" / \"a6a21f5c08d586e8daaebde37c97fb6f.png\"\n", + ",BACKDROP_DIR / \"Theater 2.sb3\" / \"061a78ed83495dd0acd6d62e83e1b972.png\"\n", + ",BACKDROP_DIR / \"Theater.sb3\" / \"c2b097bc5cdb6a14ef5485202bc5ee76.png\"\n", + ",BACKDROP_DIR / \"Tree.sb3\" / \"a23fbf972001c94637b568992f8fd7bd.png\"\n", + ",BACKDROP_DIR / \"Underwater 1.sb3\" / \"d3344650f594bcecdf46aa4a9441badd.png\"\n", + ",BACKDROP_DIR / \"Underwater 2.sb3\" / \"1517c21786d2d0edc2f3037408d850bd.png\"\n", + ",BACKDROP_DIR / \"Urban.sb3\" / \"1679049718869e1f548e1e8823e29c1c.png\"\n", + ",BACKDROP_DIR / \"Wall 1.sb3\" / \"7e5327c68ff6ddabc48dbfe4717a04fe.png\"\n", + ",BACKDROP_DIR / \"Wall 2.sb3\" / \"82d867fcd9f1b5f49e29c2f853d55665.png\"\n", + ",BACKDROP_DIR / \"Water And Rocks.sb3\" / \"0015433a406a53f00b792424b823268c.png\"\n", + ",BACKDROP_DIR / \"Wetland.sb3\" / \"ef9973bcff6d4cbc558e946028ec7d23.png\"\n", + ",BACKDROP_DIR / \"Winter.sb3\" / \"5fa9385a60b904672d0e46e9d768bb32.png\"\n", + ",BACKDROP_DIR / \"Witch House.sb3\" / \"30085b2d27beb5acdbe895d8b3e64b04.png\"\n", + ",BACKDROP_DIR / \"Woods And Bench.sb3\" / \"4fcf7ed0de6c6b6e9b52c511b0650e9c.png\"\n", + ",BACKDROP_DIR / \"Woods.sb3\" / \"f3eb165d6f3fd23370f97079f2e631bf.png\"\n", + ",BACKDROP_DIR / \"Xy-grid-20px.sb3\" / \"4eec0e1db92b8dea3e5bee25105e8f46.png\"\n", + ",BACKDROP_DIR / \"Xy-grid-30px.sb3\" / \"3b8bcabd0ac683b7cb3673208039764b.png\"\n", + ",BACKDROP_DIR / \"Xy-grid.sb3\" / \"9838d02002d05f88dc54d96494fbc202.png\",\n", + " CODE_BLOCKS_DIR / \"script1.jpg\", \n", + " CODE_BLOCKS_DIR / \"script2.jpg\",\n", + " CODE_BLOCKS_DIR / \"script3.jpg\",\n", + " CODE_BLOCKS_DIR / \"script4.jpg\",\n", + " CODE_BLOCKS_DIR / \"script5.jpg\",\n", + " CODE_BLOCKS_DIR / \"script6.jpg\"]\n", + "out_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\embed2.json\"\n", + "regenerate_reference_embeddings(folder_image_paths, out_path)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4e711ae7", + "metadata": {}, + "outputs": [], + "source": [ + "# import cv2, json,base64,io,os,tempfile,logging, re\n", + "# from io import BytesIO\n", + "# folder_image_paths = [\n", + "# SPRITE_DIR / \"Abby.sprite3\" / \"34a175600dc009a521eb46fdbbbeeb67.png\"\n", + "# ,SPRITE_DIR / \"Abby.sprite3\" / \"45de34b47a2ce22f6f5d28bb35a44ff5.png\"\n", + "# ,SPRITE_DIR / \"Abby.sprite3\" / \"809d9b47347a6af2860e7a3a35bce057.png\"\n", + "# ,SPRITE_DIR / \"Abby.sprite3\" / \"920f14335615fff9b8c55fccb8971984.png\"\n", + "# ,SPRITE_DIR / \"Amon.sprite3\" / \"60f720956ab1840431dcf0616ce98f14.png\"\n", + "# ,SPRITE_DIR / \"Andie.sprite3\" / \"b36584db82bdd45014430aa918461ca0.png\"\n", + "# ,SPRITE_DIR / \"Andie.sprite3\" / \"b3fc774e753fef520fb544127a48554b.png\"\n", + "# ,SPRITE_DIR / \"Andie.sprite3\" / \"d92aaf6cf44921905d51ca4a10a4f3d6.png\"\n", + "# ,SPRITE_DIR / \"Andie.sprite3\" / 
\"ded71c8a0f39852178f1695b622c2d89.png\"\n", + "# ,SPRITE_DIR / \"Anina Dance.sprite3\" / \"105f4f3d260dcb8bea02ea9ee5d18cf4.png\"\n", + "# ,SPRITE_DIR / \"Anina Dance.sprite3\" / \"2d208a34e74fdce9dab9d4c585dcfa2b.png\"\n", + "# ,SPRITE_DIR / \"Anina Dance.sprite3\" / \"3948aad16f8169c013c956dd152a09a6.png\"\n", + "# ,SPRITE_DIR / \"Anina Dance.sprite3\" / \"4931a363e3e4efa20230f6ff2991c6b4.png\"\n", + "# ,SPRITE_DIR / \"Anina Dance.sprite3\" / \"62c50c90535b64f2ae130a5c680ddcb4.png\"\n", + "# ,SPRITE_DIR / \"Anina Dance.sprite3\" / \"7bb9c790b02231e1272701167c26b17a.png\"\n", + "# ,SPRITE_DIR / \"Anina Dance.sprite3\" / \"84c5e22b4303c7c1fb707125706c9aaa.png\"\n", + "# ,SPRITE_DIR / \"Anina Dance.sprite3\" / \"b7693bd6250d4411ee622b67f8025924.png\"\n", + "# ,SPRITE_DIR / \"Anina Dance.sprite3\" / \"ca27e001a263ee6b5852508f39d021db.png\"\n", + "# ,SPRITE_DIR / \"Anina Dance.sprite3\" / \"d86bb27b4f8d7b70c39c96f29c6943b4.png\"\n", + "# ,SPRITE_DIR / \"Anina Dance.sprite3\" / \"db6c03113f71b91f22a9f3351f90e5bf.png\"\n", + "# ,SPRITE_DIR / \"Anina Dance.sprite3\" / \"e3698b76cb0864df2fbaba80e6bd8067.png\"\n", + "# ,SPRITE_DIR / \"Anina Dance.sprite3\" / \"ed90e8b7a05c1552194af597ac0637cd.png\"\n", + "# ,SPRITE_DIR / \"Apple.sprite3\" / \"3826a4091a33e4d26f87a2fac7cf796b.png\"\n", + "# ,SPRITE_DIR / \"Arrow1.sprite3\" / \"65b8e977641885010a10a46512fb95b4.png\"\n", + "# ,SPRITE_DIR / \"Arrow1.sprite3\" / \"70ffa0bae8693418459f21f370584f6d.png\"\n", + "# ,SPRITE_DIR / \"Arrow1.sprite3\" / \"be8fcd10da0b082f8d4775088ef7bd52.png\"\n", + "# ,SPRITE_DIR / \"Arrow1.sprite3\" / \"dafcdfda65af14e172809984710f31a9.png\"\n", + "# ,SPRITE_DIR / \"Avery Walking.sprite3\" / \"3a935fe75ac999e22b93d06b3081a271.png\"\n", + "# ,SPRITE_DIR / \"Avery Walking.sprite3\" / \"448e54fb14b13d492885fc247e76b7f4.png\"\n", + "# ,SPRITE_DIR / \"Avery Walking.sprite3\" / \"8f439476a738251043d488d7a4bc6870.png\"\n", + "# ,SPRITE_DIR / \"Avery Walking.sprite3\" / \"dc6a584704c09a3fbafb9825635a9fd4.png\"\n", + "# ,SPRITE_DIR / \"Avery.sprite3\" / \"944385ea927e8f9d72b9e19620487999.png\"\n", + "# ,SPRITE_DIR / \"Avery.sprite3\" / \"f52bde34d8027aab14b53f228fe5cc14.png\"\n", + "# ,SPRITE_DIR / \"Ball.sprite3\" / \"1c44b7494dec047371f74c705f1d99fc.png\"\n", + "# ,SPRITE_DIR / \"Ball.sprite3\" / \"3c6241985b581284ec191f9d1deffde8.png\"\n", + "# ,SPRITE_DIR / \"Ball.sprite3\" / \"ad7dc51cafd73e8279073e33b0eab335.png\"\n", + "# ,SPRITE_DIR / \"Ball.sprite3\" / \"db144b2a19f4f1ab31e30d58f00447dc.png\"\n", + "# ,SPRITE_DIR / \"Ball.sprite3\" / \"f221a2edf87aff3615c0c003e616b31b.png\"\n", + "# ,SPRITE_DIR / \"Ballerina.sprite3\" / \"4ccb1752a43f48aafe490c9c08e58c27.png\"\n", + "# ,SPRITE_DIR / \"Ballerina.sprite3\" / \"5197d3778baf55da6b81b3ada1e10021.png\"\n", + "# ,SPRITE_DIR / \"Ballerina.sprite3\" / \"5aae21aee33c3f1ae943af5ea11254bf.png\"\n", + "# ,SPRITE_DIR / \"Ballerina.sprite3\" / \"fc02bf591dd3d91eeeb50c7424d08274.png\"\n", + "# ,SPRITE_DIR / \"Balloon1.sprite3\" / \"63e5aea255610f9fdf0735e1e9a55a5c.png\"\n", + "# ,SPRITE_DIR / \"Balloon1.sprite3\" / \"a2516ac2b8d7a348194908e630387ea9.png\"\n", + "# ,SPRITE_DIR / \"Balloon1.sprite3\" / \"d7974f9e15000c16222f94ee32d8227a.png\"\n", + "# ,SPRITE_DIR / \"Bananas.sprite3\" / \"e5d3d3eb61797f5999732a8f5efead24.png\"\n", + "# ,SPRITE_DIR / \"Baseball.sprite3\" / \"74e08fc57820f925c7689e7b754c5848.png\"\n", + "# ,SPRITE_DIR / \"Basketball.sprite3\" / \"6b0b2aaa12d655e96b5b34e92d9fbd4f.png\"\n", + "# ,SPRITE_DIR / \"Bat.sprite3\" / \"4e4ced87ed37ee66c758bba077e0eae6.png\"\n", + "# 
,SPRITE_DIR / \"Bat.sprite3\" / \"60f5bfce5d9b11bfcd199a6aa5454b3f.png\"\n", + "# ,SPRITE_DIR / \"Bat.sprite3\" / \"698c2a48e774f9959d57c9618b156c20.png\"\n", + "# ,SPRITE_DIR / \"Bat.sprite3\" / \"bc6dd12fc9e407c7774959cdf427f8b5.png\"\n", + "# ,SPRITE_DIR / \"Batter.sprite3\" / \"592ee9ab2aeefe65cb4fb95fcd046f33.png\"\n", + "# ,SPRITE_DIR / \"Batter.sprite3\" / \"9d193bef6e3d6d8eba6d1470b8bf9351.png\"\n", + "# ,SPRITE_DIR / \"Batter.sprite3\" / \"baseball_sprite_motion_1.png\"\n", + "# ,SPRITE_DIR / \"Batter.sprite3\" / \"bd4fc003528acfa847e45ff82f346eee.png\"\n", + "# ,SPRITE_DIR / \"Batter.sprite3\" / \"fdfde4bcbaca0f68e83fdf3f4ef0c660.png\"\n", + "# ,SPRITE_DIR / \"Beachball.sprite3\" / \"5198b5a03ebae60698e0906f59a5fc15.png\"\n", + "# ,SPRITE_DIR / \"Bear-walking.sprite3\" / \"0a38a860f2e573b8dc5b09f390d30fbd.png\"\n", + "# ,SPRITE_DIR / \"Bear-walking.sprite3\" / \"36d06aa23c684fc996952adb0e76e6b4.png\"\n", + "# ,SPRITE_DIR / \"Bear-walking.sprite3\" / \"6d4d06e3f4cd0c9455b777b9a40782b6.png\"\n", + "# ,SPRITE_DIR / \"Bear-walking.sprite3\" / \"6d50c5fe63ab5f77d10144a68ca535a6.png\"\n", + "# ,SPRITE_DIR / \"Bear-walking.sprite3\" / \"7453709bef16e33e6f989aee14d7fc07.png\"\n", + "# ,SPRITE_DIR / \"Bear-walking.sprite3\" / \"d2a5f124f988def1d214e6d0813a48f3.png\"\n", + "# ,SPRITE_DIR / \"Bear-walking.sprite3\" / \"e531b307381c2aa148be4ccc36db0333.png\"\n", + "# ,SPRITE_DIR / \"Bear-walking.sprite3\" / \"f36c80d2e731be95df7ec6d07f89fa00.png\"\n", + "# ,SPRITE_DIR / \"Bear.sprite3\" / \"6f303e972f33fcb7ef36d0d8012d0975.png\"\n", + "# ,SPRITE_DIR / \"Bear.sprite3\" / \"bear_motion_2.png\"\n", + "# ,SPRITE_DIR / \"Bear.sprite3\" / \"deef1eaa96d550ae6fc11524a1935024.png\"\n", + "# ,SPRITE_DIR / \"Beetle.sprite3\" / \"46d0dfd4ae7e9bfe3a6a2e35a4905eae.png\"\n", + "# ,SPRITE_DIR / \"Bell.sprite3\" / \"8c0234fe1bfd36f5a72e975fbbc18bfd.png\"\n", + "# ,SPRITE_DIR / \"Ben.sprite3\" / \"165d993c30dfdb9e829d0d98867d7826.png\"\n", + "# ,SPRITE_DIR / \"Ben.sprite3\" / \"2cd77b8a9961e7ad4da905e7731b7c1b.png\"\n", + "# ,SPRITE_DIR / \"Ben.sprite3\" / \"9f9f88aea3457084d8d734040b0b9067.png\"\n", + "# ,SPRITE_DIR / \"Ben.sprite3\" / \"acc208e29f0422c2bcffa3b8873abc63.png\"\n", + "# ,SPRITE_DIR / \"Block-A.sprite3\" / \"ef3b01f6fc1ffa1270fbbf057f7ded42.png\"\n", + "# ,SPRITE_DIR / \"Block-B.sprite3\" / \"1dc05fbaa37a6b41ffff459d0a776989.png\"\n", + "# ,SPRITE_DIR / \"Block-C.sprite3\" / \"43090c4b423c977041542ce12017fda0.png\"\n", + "# ,SPRITE_DIR / \"Block-D.sprite3\" / \"1fb3db31500d6f7da662e825157920fa.png\"\n", + "# ,SPRITE_DIR / \"Block-E.sprite3\" / \"240aacc04444cef3b2ef8cfaf0dae479.png\"\n", + "# ,SPRITE_DIR / \"Block-F.sprite3\" / \"d88d750ce848d7dbeeca3f02249350e2.png\"\n", + "# ,SPRITE_DIR / \"Block-G.sprite3\" / \"989c76ae7f8c2e42ebeacdda961061ca.png\"\n", + "# ,SPRITE_DIR / \"Block-H.sprite3\" / \"93426b2f313d1bdedff368d94fc989d6.png\"\n", + "# ,SPRITE_DIR / \"Block-I.sprite3\" / \"f911b18605f59c75adf4d83e07811fd8.png\"\n", + "# ,SPRITE_DIR / \"Block-J.sprite3\" / \"8580c990ac918577550165447f870542.png\"\n", + "# ,SPRITE_DIR / \"Block-K.sprite3\" / \"d93a9fd4bfb5bc1e9790945fa756b748.png\"\n", + "# ,SPRITE_DIR / \"Block-L.sprite3\" / \"579c90cbaf847e9adf4faf37f340b32d.png\"\n", + "# ,SPRITE_DIR / \"Block-M.sprite3\" / \"6c5cf1fd0673f441b04e15e799685831.png\"\n", + "# ,SPRITE_DIR / \"Block-N.sprite3\" / \"9eba5dd44d65e1d421c40686fecde906.png\"\n", + "# ,SPRITE_DIR / \"Block-O.sprite3\" / \"8bbbde09c13a06015e554ab36fa178c0.png\"\n", + "# ,SPRITE_DIR / \"Block-P.sprite3\" / 
\"0f920b99ac49421cf28e55c8d863bdc5.png\"\n", + "# ,SPRITE_DIR / \"Block-Q.sprite3\" / \"67f8e80eabaec4883eb9c67c9527004a.png\"\n", + "# ,SPRITE_DIR / \"Block-R.sprite3\" / \"9d0432c5575451e251990d89845f8d00.png\"\n", + "# ,SPRITE_DIR / \"Block-S.sprite3\" / \"83c7486b08e78d099b4e776aaa2783fe.png\"\n", + "# ,SPRITE_DIR / \"Block-T.sprite3\" / \"6c1b26611ec0483f601a648f59305aff.png\"\n", + "# ,SPRITE_DIR / \"Block-U.sprite3\" / \"d02f77994789f528f0aaa7f211690151.png\"\n", + "# ,SPRITE_DIR / \"Block-V.sprite3\" / \"0654cfcb6234406837336e90be7e419c.png\"\n", + "# ,SPRITE_DIR / \"Block-W.sprite3\" / \"2b3145ae89c32793c4fcea9a6bcc6075.png\"\n", + "# ,SPRITE_DIR / \"Block-X.sprite3\" / \"a73f354dc045bbbc5a491d9367192a80.png\"\n", + "# ,SPRITE_DIR / \"Block-Y.sprite3\" / \"e13e79f106d32a3176dbcf5c1b35827d.png\"\n", + "# ,SPRITE_DIR / \"Block-Z.sprite3\" / \"c57d371b291d43675f46601518098572.png\"\n", + "# ,SPRITE_DIR / \"Bowl.sprite3\" / \"d147f16e3e2583719c073ac5b55fe3ca.png\"\n", + "# ,SPRITE_DIR / \"Bowtie.sprite3\" / \"4b032ba44b8077439e73815542e7ed23.png\"\n", + "# ,SPRITE_DIR / \"Bread.sprite3\" / \"585de1550446d4420f8a10fdecac995b.png\"\n", + "# ,SPRITE_DIR / \"Broom.sprite3\" / \"556288a1c996345c751a3dc88b570cfa.png\"\n", + "# ,SPRITE_DIR / \"Buildings.sprite3\" / \"148034b1557cc3dae39953e43ab50ff0.png\"\n", + "# ,SPRITE_DIR / \"Buildings.sprite3\" / \"4212ff1769c169bfa0db043b18fdade8.png\"\n", + "# ,SPRITE_DIR / \"Buildings.sprite3\" / \"80b120b7152ed72fded84fef485f4f79.png\"\n", + "# ,SPRITE_DIR / \"Buildings.sprite3\" / \"8f64966be60d332b345598819c67a8b6.png\"\n", + "# ,SPRITE_DIR / \"Buildings.sprite3\" / \"a8c977a3b85ffe8c8b453c9d668989b8.png\"\n", + "# ,SPRITE_DIR / \"Buildings.sprite3\" / \"bb47a3d5d03a34937557c558c6cb5d18.png\"\n", + "# ,SPRITE_DIR / \"Buildings.sprite3\" / \"d1fcce0aac589a17324943a3b759fc2a.png\"\n", + "# ,SPRITE_DIR / \"Buildings.sprite3\" / \"e4764cfc384a499f92da3ea745bcebe2.png\"\n", + "# ,SPRITE_DIR / \"Buildings.sprite3\" / \"e8c9508b1f6a0a432e09c10ef9ada67c.png\"\n", + "# ,SPRITE_DIR / \"Buildings.sprite3\" / \"fcedb6b25a2db6de28b39130f978b0bf.png\"\n", + "# ,SPRITE_DIR / \"Butterfly 1.sprite3\" / \"34b76c1835c6a7fc2c47956e49bb0f52.png\"\n", + "# ,SPRITE_DIR / \"Butterfly 1.sprite3\" / \"49c9f952007d870a046cff93b6e5e098.png\"\n", + "# ,SPRITE_DIR / \"Butterfly 1.sprite3\" / \"fe98df7367e314d9640bfaa54fc239be.png\"\n", + "# ,SPRITE_DIR / \"Butterfly 2.sprite3\" / \"372ae0abd2e8e50a20bc12cb160d8746.png\"\n", + "# ,SPRITE_DIR / \"Butterfly 2.sprite3\" / \"e96f4c6913107c9b790d37bb65507c14.png\"\n", + "# ,SPRITE_DIR / \"Button1.sprite3\" / \"21fb7fa07eac4794fded0be4e18e20a2.png\"\n", + "# ,SPRITE_DIR / \"Button2.sprite3\" / \"329bf3d86050ceaea2b27e2c5d2baec1.png\"\n", + "# ,SPRITE_DIR / \"Button2.sprite3\" / \"af4cd54e776031bc9cc54ddd6892f97b.png\"\n", + "# ,SPRITE_DIR / \"Button3.sprite3\" / \"5021f6b7d166873ef0711c4d4a351912.png\"\n", + "# ,SPRITE_DIR / \"Button3.sprite3\" / \"a3b357ea21773bcb3545a227ee877e9a.png\"\n", + "# ,SPRITE_DIR / \"Button4.sprite3\" / \"71ced7c192168c7b221d16b4eaff440e.png\"\n", + "# ,SPRITE_DIR / \"Button4.sprite3\" / \"7d34ad26633abbc752c9cd93ace0a81f.png\"\n", + "# ,SPRITE_DIR / \"Button5.sprite3\" / \"94957f2f79e8970d8b2cd0f74a0c1ffc.png\"\n", + "# ,SPRITE_DIR / \"Button5.sprite3\" / \"a4bb9a9e06e65337798471035719985a.png\"\n", + "# ,SPRITE_DIR / \"Cake.sprite3\" / \"862488bf66b67c5330cae9235b853b6e.png\"\n", + "# ,SPRITE_DIR / \"Cake.sprite3\" / \"dfe9c5d40da0dcc386fad524c36d3579.png\"\n", + "# ,SPRITE_DIR / \"Calvrett.sprite3\" / 
\"452683db3ad7a882f5ab9de496441592.png\"\n", + "# ,SPRITE_DIR / \"Calvrett.sprite3\" / \"728ec1ebc275b53809023a36c66eeaa3.png\"\n", + "# ,SPRITE_DIR / \"Casey.sprite3\" / \"50bd5162671b8a30fcfa3082a9e79ec4.png\"\n", + "# ,SPRITE_DIR / \"Casey.sprite3\" / \"e09e5ef2bdeb69163a543f3216c1f54c.png\"\n", + "# ,SPRITE_DIR / \"Casey.sprite3\" / \"e5a47371f3e9f853b36560cda35344b6.png\"\n", + "# ,SPRITE_DIR / \"Casey.sprite3\" / \"ebc3de539e02801d420268eb189c5a47.png\"\n", + "# ,SPRITE_DIR / \"Cassy Dance.sprite3\" / \"63483bbf72fc55719918a335e1a16426.png\"\n", + "# ,SPRITE_DIR / \"Cassy Dance.sprite3\" / \"6cb3686db1fa658b6541cc9fa3ccfcc7.png\"\n", + "# ,SPRITE_DIR / \"Cassy Dance.sprite3\" / \"aca39a47cf3affd8a83d3287d2856c29.png\"\n", + "# ,SPRITE_DIR / \"Cassy Dance.sprite3\" / \"f801cec764da5ef6374e1d557296d14e.png\"\n", + "# ,SPRITE_DIR / \"Cat 2.sprite3\" / \"7499cf6ec438d0c7af6f896bc6adc294.png\"\n", + "# ,SPRITE_DIR / \"Cat Flying.sprite3\" / \"6667936a2793aade66c765c329379ad0.png\"\n", + "# ,SPRITE_DIR / \"Cat Flying.sprite3\" / \"a1ab94c8172c3b97ed9a2bf7c32172cd.png\"\n", + "# ,SPRITE_DIR / \"Cat.sprite3\" / \"0fb9be3e8397c983338cb71dc84d0b25.png\"\n", + "# ,SPRITE_DIR / \"Cat.sprite3\" / \"bcf454acf82e4504149f7ffe07081dbc.png\"\n", + "# ,SPRITE_DIR / \"Catcher.sprite3\" / \"895cdda4f2bd9d6f50ff07188e7ce395.png\"\n", + "# ,SPRITE_DIR / \"Catcher.sprite3\" / \"8aa875f077c405e2045f5ab60705e712.png\"\n", + "# ,SPRITE_DIR / \"Catcher.sprite3\" / \"99af13802e9bfd7b4a4bfb8ead825c0c.png\"\n", + "# ,SPRITE_DIR / \"Catcher.sprite3\" / \"a31e30677637ae4de975d40b6d822853.png\"\n", + "# ,SPRITE_DIR / \"Centaur.sprite3\" / \"2373556e776cad3ba4d6ee04fc34550b.png\"\n", + "# ,SPRITE_DIR / \"Centaur.sprite3\" / \"c00ffa6c5dd0baf9f456b897ff974377.png\"\n", + "# ,SPRITE_DIR / \"Centaur.sprite3\" / \"d722329bd9373ad80625e5be6d52f3ed.png\"\n", + "# ,SPRITE_DIR / \"Centaur.sprite3\" / \"d7aa990538915b7ef1f496d7e8486ade.png\"\n", + "# ,SPRITE_DIR / \"Champ99.sprite3\" / \"20318b14a332fd618ec91e7c1de8be9a.png\"\n", + "# ,SPRITE_DIR / \"Champ99.sprite3\" / \"26fdff424232926001d20041c3d5673b.png\"\n", + "# ,SPRITE_DIR / \"Champ99.sprite3\" / \"56f3220fa82d99dcfc7d27d433ed01e4.png\"\n", + "# ,SPRITE_DIR / \"Champ99.sprite3\" / \"68453506ae4b6b60a3fc6817ba39d492.png\"\n", + "# ,SPRITE_DIR / \"Champ99.sprite3\" / \"7b073f47fbd9421e0d60daacc157f506.png\"\n", + "# ,SPRITE_DIR / \"Champ99.sprite3\" / \"a28ffc2b129fb359ff22c79c48341267.png\"\n", + "# ,SPRITE_DIR / \"Champ99.sprite3\" / \"d6ae13605610aa008d48b0c8b25a57d3.png\"\n", + "# ,SPRITE_DIR / \"Characters 1.sprite3\" / \"03bc23a9fa12c1244c83a07a81f20bfd.png\"\n", + "# ,SPRITE_DIR / \"Characters 1.sprite3\" / \"0f18f9e90d0ed68ebec23da087eb2603.png\"\n", + "# ,SPRITE_DIR / \"Characters 1.sprite3\" / \"1044a68cc743f83564e36a6bca16830b.png\"\n", + "# ,SPRITE_DIR / \"Characters 1.sprite3\" / \"1e303bb57aac0cb4678e85de4251f3f4.png\"\n", + "# ,SPRITE_DIR / \"Characters 1.sprite3\" / \"527ba82c5e82f43c8fca0be905dbe20a.png\"\n", + "# ,SPRITE_DIR / \"Characters 1.sprite3\" / \"5e2f620e5687a36e1954414054c69ccc.png\"\n", + "# ,SPRITE_DIR / \"Characters 1.sprite3\" / \"6be261800647c53becb1f93ed31ed13e.png\"\n", + "# ,SPRITE_DIR / \"Characters 1.sprite3\" / \"6d5ddfc69f9c6a3f1d2ded1428237931.png\"\n", + "# ,SPRITE_DIR / \"Characters 1.sprite3\" / \"6f78ce6a87d114162ed9fbef30f9a0fd.png\"\n", + "# ,SPRITE_DIR / \"Characters 1.sprite3\" / \"984043e1e7c544999c31f952d1d43a56.png\"\n", + "# ,SPRITE_DIR / \"Characters 1.sprite3\" / \"b37d0e0d46f07cb2cbdc5285e176bf62.png\"\n", + "# 
,SPRITE_DIR / \"Characters 1.sprite3\" / \"cc0be722cf93eef63726bd606ab11c5c.png\"\n", + "# ,SPRITE_DIR / \"Characters 1.sprite3\" / \"f26b130c2c58b812be21d1a9745863a1.png\"\n", + "# ,SPRITE_DIR / \"Characters 2.sprite3\" / \"1cf73a791959e07b5bafe18474f93b78.png\"\n", + "# ,SPRITE_DIR / \"Characters 2.sprite3\" / \"67d425b11544caa0fe9228f355c6485b.png\"\n", + "# ,SPRITE_DIR / \"Characters 2.sprite3\" / \"7084b3baab935de819cc5ab46f7cecf8.png\"\n", + "# ,SPRITE_DIR / \"Characters 2.sprite3\" / \"93e035270675f933b94ee951d7e475e3.png\"\n", + "# ,SPRITE_DIR / \"Characters 2.sprite3\" / \"bf0d808f7bf0c11c338b4fea0a735874.png\"\n", + "# ,SPRITE_DIR / \"Characters 2.sprite3\" / \"db3f436fcb6fb28828a4c932b60feb5e.png\"\n", + "# ,SPRITE_DIR / \"Characters 2.sprite3\" / \"df7cbf2913bcea721df2e0360644f193.png\"\n", + "# ,SPRITE_DIR / \"Characters 2.sprite3\" / \"e0eacf1e575adc559c41e3a81a892168.png\"\n", + "# ,SPRITE_DIR / \"Characters 2.sprite3\" / \"e8b44b0e904fd4bb7430c26b743f1520.png\"\n", + "# ,SPRITE_DIR / \"Characters 2.sprite3\" / \"f4f2778df2840de5a6449a49f3efb599.png\"\n", + "# ,SPRITE_DIR / \"Cheesy Puffs.sprite3\" / \"82772a61ec74974e84c686c61ea0b7d5.png\"\n", + "# ,SPRITE_DIR / \"Chick.sprite3\" / \"5e23c8c28ffd390df7deb2414be37781.png\"\n", + "# ,SPRITE_DIR / \"Chick.sprite3\" / \"77911bbe5e11ede35871e8002a26356d.png\"\n", + "# ,SPRITE_DIR / \"Chick.sprite3\" / \"80abbc427366bca477ccf1ef0faf240a.png\"\n", + "# ,SPRITE_DIR / \"City Bus.sprite3\" / \"7d7e26014a346b894db8ab1819f2167f.png\"\n", + "# ,SPRITE_DIR / \"City Bus.sprite3\" / \"e9694adbff9422363e2ea03166015393.png\"\n", + "# ,SPRITE_DIR / \"Cloud.sprite3\" / \"c9630e30e59e4565e785a26f58568904.png\"\n", + "# ,SPRITE_DIR / \"Clouds.sprite3\" / \"0188b2c7c85176b462881c6bca7a7748.png\"\n", + "# ,SPRITE_DIR / \"Clouds.sprite3\" / \"9105d7dd90b5f2a4b85a1e71aff8703f.png\"\n", + "# ,SPRITE_DIR / \"Clouds.sprite3\" / \"9f2eccce13e3e5fd212efd59ff1d96a0.png\"\n", + "# ,SPRITE_DIR / \"Clouds.sprite3\" / \"9f5958f46d21e33d3f6d7caffbe0daa9.png\"\n", + "# ,SPRITE_DIR / \"Convertible 2.sprite3\" / \"621817ef84ad81f5690fac95adab2ede.png\"\n", + "# ,SPRITE_DIR / \"Convertible.sprite3\" / \"5b883f396844ff5cfecd7c95553fa4fb.png\"\n", + "# ,SPRITE_DIR / \"Crab.sprite3\" / \"49839aa1b0feed02a3c759db5f8dee71.png\"\n", + "# ,SPRITE_DIR / \"Crab.sprite3\" / \"f7cdd2acbc6d7559d33be8675059c79e.png\"\n", + "# ,SPRITE_DIR / \"Crystal.sprite3\" / \"0a7b872042cecaf30cc154c0144f002b.png\"\n", + "# ,SPRITE_DIR / \"Crystal.sprite3\" / \"ecd1e7805b37db4caf207b7eef2b7a42.png\"\n", + "# ,SPRITE_DIR / \"D-Money Dance.sprite3\" / \"05529eb3c09294bd15f57c6f10d5894e.png\"\n", + "# ,SPRITE_DIR / \"D-Money Dance.sprite3\" / \"12db59633a1709a2c39534d35263791f.png\"\n", + "# ,SPRITE_DIR / \"D-Money Dance.sprite3\" / \"19bd7995d37e3baade673b2fe7cb982b.png\"\n", + "# ,SPRITE_DIR / \"D-Money Dance.sprite3\" / \"32ec7b5332cfebd1cfed7f6b79c76e67.png\"\n", + "# ,SPRITE_DIR / \"D-Money Dance.sprite3\" / \"344384a6a3f1bdf494cc7af31e928d36.png\"\n", + "# ,SPRITE_DIR / \"D-Money Dance.sprite3\" / \"3cdebabdb41f6c3e84561cf3ea87bac3.png\"\n", + "# ,SPRITE_DIR / \"D-Money Dance.sprite3\" / \"50faf1630ea383c0b8c77f70a9329797.png\"\n", + "# ,SPRITE_DIR / \"D-Money Dance.sprite3\" / \"70da166596bb484eae1bfbaad5c03d54.png\"\n", + "# ,SPRITE_DIR / \"D-Money Dance.sprite3\" / \"729812366245c0dafd456339c9d94e08.png\"\n", + "# ,SPRITE_DIR / \"D-Money Dance.sprite3\" / \"a22da98e5e63de7b2883355afd0184f0.png\"\n", + "# ,SPRITE_DIR / \"D-Money Dance.sprite3\" / 
\"a4b5d644d9abdbcab236acf19b2a2e81.png\"\n", + "# ,SPRITE_DIR / \"D-Money Dance.sprite3\" / \"dafbdfe454c5ec7029b5c1e07fcabc90.png\"\n", + "# ,SPRITE_DIR / \"Dan.sprite3\" / \"307250744e230fb15e7062238bf2634c.png\"\n", + "# ,SPRITE_DIR / \"Dan.sprite3\" / \"89b55d049f4b3811676311df00681385.png\"\n", + "# ,SPRITE_DIR / \"Dani.sprite3\" / \"2cba86439098a7e0daa46e0ff8a59f7c.png\"\n", + "# ,SPRITE_DIR / \"Dani.sprite3\" / \"6518333c95cf96a9aaf73a4a948e002f.png\"\n", + "# ,SPRITE_DIR / \"Dani.sprite3\" / \"b5f989e21b56af371209369c331b821e.png\"\n", + "# ,SPRITE_DIR / \"Dee.sprite3\" / \"1de3bbee2771b0ff16c4658d5ad98b0b.png\"\n", + "# ,SPRITE_DIR / \"Dee.sprite3\" / \"320a892c86e9b039ba9d6d50a4897276.png\"\n", + "# ,SPRITE_DIR / \"Dee.sprite3\" / \"43bd4c241a94b3aea883472d7dab5afc.png\"\n", + "# ,SPRITE_DIR / \"Dee.sprite3\" / \"c57c4593701165cdea6de9b014c7c06d.png\"\n", + "# ,SPRITE_DIR / \"Dee.sprite3\" / \"e4c6ada3509f7033d14bac2c0eea49dc.png\"\n", + "# ,SPRITE_DIR / \"Devin.sprite3\" / \"5ab51aeaa296e955e75a7a3c103ebb99.png\"\n", + "# ,SPRITE_DIR / \"Devin.sprite3\" / \"5f614017dba0ce6bff063f6c62041035.png\"\n", + "# ,SPRITE_DIR / \"Devin.sprite3\" / \"9d7414a719d6cc5e0e9071ede200a29c.png\"\n", + "# ,SPRITE_DIR / \"Devin.sprite3\" / \"bfc7c20b64f86d4b207780f3da695fa4.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur1.sprite3\" / \"22d94ee5daf557284465425a61186234.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur1.sprite3\" / \"45b02fbd582c15a50e1953830b59b377.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur1.sprite3\" / \"7f89417968116ada83d4ddaad22403b3.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur1.sprite3\" / \"af158d368bf3da576369be1130e18acd.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur2.sprite3\" / \"0e43f8e573bf232505b207b92efac2ac.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur2.sprite3\" / \"7799f2848136d11f48ca5f3105d336ef.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur2.sprite3\" / \"d926c5758d130fcfd9a7ae7dac47e47d.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur2.sprite3\" / \"e606ba27dfe94daf3d8e3fdf599e37cf.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur3.sprite3\" / \"5381feb0fc1b50ddc2793342daddffef.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur3.sprite3\" / \"ae98efa1c3c3700602e1344db86aaf72.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur3.sprite3\" / \"cf4fb77a4e9839f83d3fa5fc0982ccd3.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur3.sprite3\" / \"d85ec1b97f73564ef26fec73d5056c68.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur3.sprite3\" / \"e731d1f1ebf4bc0ea55b850ffe5a5f96.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur4.sprite3\" / \"723bd1559f8baae4184fa24a6513362b.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur4.sprite3\" / \"a98e3f93853513e7c00bab4c61752312.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur4.sprite3\" / \"ac99ef62e3e018b8db550bb2a187cbe9.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur4.sprite3\" / \"c63cca929380152b978d8671fe6003f7.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur5.sprite3\" / \"26fca11e4251d60ed7aa5d08f4ae2a69.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur5.sprite3\" / \"3b2cf97b1cc7fc535162ba5849a0e29c.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur5.sprite3\" / \"42e3bf118c775ba54239af4276800a0a.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur5.sprite3\" / \"5882227a9e2f0f3b2014c49328969762.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur5.sprite3\" / \"5a0832162a0cfa7adab6090c42e89714.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur5.sprite3\" / \"9d200a7c2e93eac8cf52ede3a87d7969.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur5.sprite3\" / \"c4044a3badea77ced4f2db69aff866ed.png\"\n", + "# ,SPRITE_DIR / \"Dinosaur5.sprite3\" / \"f49b3b098a24474f20c8f4686681c611.png\"\n", + "# ,SPRITE_DIR / \"Diver1.sprite3\" / 
\"a24f23a0f5d77cfb59721ef8f6bfe5c7.png\"\n", + "# ,SPRITE_DIR / \"Diver2.sprite3\" / \"ef8136a42b7d20961756e551bc87b37f.png\"\n", + "# ,SPRITE_DIR / \"Dog1.sprite3\" / \"35cd78a8a71546a16c530d0b2d7d5a7f.png\"\n", + "# ,SPRITE_DIR / \"Dog1.sprite3\" / \"d5a72e1eb23a91df4b53c0b16493d1e6.png\"\n", + "# ,SPRITE_DIR / \"Dog2.sprite3\" / \"4708bff29b3a295a03ac1d5e2d16ec75.png\"\n", + "# ,SPRITE_DIR / \"Dog2.sprite3\" / \"66b435d333f34d02d5ae49a598bcc5b3.png\"\n", + "# ,SPRITE_DIR / \"Dog2.sprite3\" / \"6afc06388d69f99e28d883126f9b2734.png\"\n", + "# ,SPRITE_DIR / \"Donut.sprite3\" / \"316a67c9e966fd015b4538f54be456db.png\"\n", + "# ,SPRITE_DIR / \"Dorian.sprite3\" / \"603d3dd151984c0eaa2822f70a234c28.png\"\n", + "# ,SPRITE_DIR / \"Dorian.sprite3\" / \"7d20ec98603857c031c1f4ad2bd8ea51.png\"\n", + "# ,SPRITE_DIR / \"Dorian.sprite3\" / \"8f2be2387efcbb5d4878886adaa2a88e.png\"\n", + "# ,SPRITE_DIR / \"Dorian.sprite3\" / \"a9a064a1f28c9e22b594dcea1d46025b.png\"\n", + "# ,SPRITE_DIR / \"Dot.sprite3\" / \"106461f60e34ce231b323e2dd2d9f05b.png\"\n", + "# ,SPRITE_DIR / \"Dot.sprite3\" / \"21482022f9930400302bc8ec70643717.png\"\n", + "# ,SPRITE_DIR / \"Dot.sprite3\" / \"9e5a6cc6970ce4932a09affba70a45b0.png\"\n", + "# ,SPRITE_DIR / \"Dot.sprite3\" / \"fb047c94113ee4c6664305a338525e6a.png\"\n", + "# ,SPRITE_DIR / \"Dove.sprite3\" / \"0f83ab55012a7affd94e38250d55a0a0.png\"\n", + "# ,SPRITE_DIR / \"Dove.sprite3\" / \"778a699a044a0a8c10f44c3194e21ef2.png\"\n", + "# ,SPRITE_DIR / \"Dragon.sprite3\" / \"12ead885460d96a19132e5970839d36d.png\"\n", + "# ,SPRITE_DIR / \"Dragon.sprite3\" / \"3f672475ad4ca5d1f9331cffd4223140.png\"\n", + "# ,SPRITE_DIR / \"Dragon.sprite3\" / \"e0aa0083fa0b97da97600d4dbb2055e5.png\"\n", + "# ,SPRITE_DIR / \"Dragonfly.sprite3\" / \"17b864c1ddd4b349a6c4bd5709167307.png\"\n", + "# ,SPRITE_DIR / \"Dragonfly.sprite3\" / \"5cdfe67af929e3fb095e83c9c4b0bd78.png\"\n", + "# ,SPRITE_DIR / \"Dress.sprite3\" / \"4e22e6fd72500f0a25b959283bfd0a32.png\"\n", + "# ,SPRITE_DIR / \"Dress.sprite3\" / \"c5fb135d89573570010b0d96c94bcec6.png\"\n", + "# ,SPRITE_DIR / \"Dress.sprite3\" / \"ddbea537af6012ebac18d16d65c07479.png\"\n", + "# ,SPRITE_DIR / \"Drum Kit.sprite3\" / \"3f4fb4836338c55f883607c403b2b25e.png\"\n", + "# ,SPRITE_DIR / \"Drum Kit.sprite3\" / \"baf6344b6f55b074786a383c1097697d.png\"\n", + "# ,SPRITE_DIR / \"Drum-cymbal.sprite3\" / \"08355ec8cc4b3263f502adfdea993cda.png\"\n", + "# ,SPRITE_DIR / \"Drum-cymbal.sprite3\" / \"78398692e6fa226568df0374c4358da4.png\"\n", + "# ,SPRITE_DIR / \"Drum-highhat.sprite3\" / \"15b2a31a57d0cd911ad0b1c265dcf59e.png\"\n", + "# ,SPRITE_DIR / \"Drum-highhat.sprite3\" / \"866b3a49ee2a45998940e2d737c4c502.png\"\n", + "# ,SPRITE_DIR / \"Drum-snare.sprite3\" / \"28298d93f5282041267a92bd67308107.png\"\n", + "# ,SPRITE_DIR / \"Drum-snare.sprite3\" / \"c42bb05aab3cacddcd88712e33ab8df0.png\"\n", + "# ,SPRITE_DIR / \"Drum.sprite3\" / \"47531b5675be696d0540eb120d5d0678.png\"\n", + "# ,SPRITE_DIR / \"Drum.sprite3\" / \"ce6971317035091341ec40571c9056e9.png\"\n", + "# ,SPRITE_DIR / \"Drums Conga.sprite3\" / \"2b2eacfce0fb1af023e6ca0f5ef6defe.png\"\n", + "# ,SPRITE_DIR / \"Drums Conga.sprite3\" / \"bdad2f140cfbd021f38241fc9acc7fd2.png\"\n", + "# ,SPRITE_DIR / \"Drums Tabla.sprite3\" / \"992d6359be830d977559dad91b04f698.png\"\n", + "# ,SPRITE_DIR / \"Drums Tabla.sprite3\" / \"af071d9d714c5c622e2bb07133698ce3.png\"\n", + "# ,SPRITE_DIR / \"Duck.sprite3\" / \"c9837d0454f5f0f73df290af2045359b.png\"\n", + "# ,SPRITE_DIR / \"Earth.sprite3\" / \"7405b5efa96995bae6853667f8cd145e.png\"\n", + 
"# ,SPRITE_DIR / \"Easel.sprite3\" / \"6a736beddc7844538be390c18b7c4361.png\"\n", + "# ,SPRITE_DIR / \"Easel.sprite3\" / \"a4b3714322c11b350f09a75921ae606b.png\"\n", + "# ,SPRITE_DIR / \"Easel.sprite3\" / \"caec09682a7fcdffef4647e8355ba004.png\"\n", + "# ,SPRITE_DIR / \"Egg.sprite3\" / \"0d127490af16f8a4ca5ce3212b2391c2.png\"\n", + "# ,SPRITE_DIR / \"Egg.sprite3\" / \"41535b4742f40e2630746b0c4bec98f2.png\"\n", + "# ,SPRITE_DIR / \"Egg.sprite3\" / \"b0b6e88ec64b842398200bab562b53e3.png\"\n", + "# ,SPRITE_DIR / \"Egg.sprite3\" / \"bb0505b802140a8cc200c9f8bfce4503.png\"\n", + "# ,SPRITE_DIR / \"Egg.sprite3\" / \"f8ee449298c1446cb0ef281923a4e57a.png\"\n", + "# ,SPRITE_DIR / \"Egg.sprite3\" / \"fbc629c3b062423e8c09cfacfb1e65f8.png\"\n", + "# ,SPRITE_DIR / \"Elephant.sprite3\" / \"2c9b5e0125d95b8bc511f6bb09b5ea2f.png\"\n", + "# ,SPRITE_DIR / \"Elephant.sprite3\" / \"b59873e9558c1c456200f50e5ab34770.png\"\n", + "# ,SPRITE_DIR / \"Elf.sprite3\" / \"524406c2b1fe253c1565ff516309817e.png\"\n", + "# ,SPRITE_DIR / \"Elf.sprite3\" / \"808c6fa2eb1cba0de1d17b18c6f41279.png\"\n", + "# ,SPRITE_DIR / \"Elf.sprite3\" / \"92ff640b911a8348d2734c0e38bba68c.png\"\n", + "# ,SPRITE_DIR / \"Elf.sprite3\" / \"e92abad171396a3198455df8557802e5.png\"\n", + "# ,SPRITE_DIR / \"Elf.sprite3\" / \"ec458328a85f89f06866e2337076ac0a.png\"\n", + "# ,SPRITE_DIR / \"Fairy.sprite3\" / \"40d726e17bfd2ffeb8c0aa5393ee1c77.png\"\n", + "# ,SPRITE_DIR / \"Fairy.sprite3\" / \"902350bba0d4b4612db1e2e902b6f201.png\"\n", + "# ,SPRITE_DIR / \"Fairy.sprite3\" / \"bea920473027f43e04c44e588c6cc39a.png\"\n", + "# ,SPRITE_DIR / \"Fairy.sprite3\" / \"d4f6163a1610243f55dd9cf1c9875c61.png\"\n", + "# ,SPRITE_DIR / \"Fairy.sprite3\" / \"decd31f829032b1d4dcf5efdbd362cb9.png\"\n", + "# ,SPRITE_DIR / \"Fish.sprite3\" / \"4a3478b3cdc3e8688a671be88c2775fd.png\"\n", + "# ,SPRITE_DIR / \"Fish.sprite3\" / \"7a0c31c0087f342867d4754f8dc57541.png\"\n", + "# ,SPRITE_DIR / \"Fish.sprite3\" / \"886e0bb732453eb8d3a849b4eab54943.png\"\n", + "# ,SPRITE_DIR / \"Fish.sprite3\" / \"a9b3d163756621f8395592ad77fb9369.png\"\n", + "# ,SPRITE_DIR / \"Fishbowl.sprite3\" / \"17c53cf0296f24722ba5b001d513e58f.png\"\n", + "# ,SPRITE_DIR / \"Fishbowl.sprite3\" / \"b3db01c5cda32fe3ea0b48dde5fa8130.png\"\n", + "# ,SPRITE_DIR / \"Food Truck.sprite3\" / \"a77f9693f87288d023a4632cf019776e.png\"\n", + "# ,SPRITE_DIR / \"Food Truck.sprite3\" / \"e850e3c93de767519f7f78b38f16ed1d.png\"\n", + "# ,SPRITE_DIR / \"Food Truck.sprite3\" / \"f4150de2297a63c3efd125c8e12dd7cc.png\"\n", + "# ,SPRITE_DIR / \"Football.sprite3\" / \"7ee31371b2eafba57cc5a78fc1a787fe.png\"\n", + "# ,SPRITE_DIR / \"Football.sprite3\" / \"c717def72c8bd98749284d31b51d7097.png\"\n", + "# ,SPRITE_DIR / \"Fortune Cookie.sprite3\" / \"c56dcaa1fa4e3c9740142b93d5982850.png\"\n", + "# ,SPRITE_DIR / \"Fox.sprite3\" / \"2c256eacbb753be361e8e52a0eefde77.png\"\n", + "# ,SPRITE_DIR / \"Fox.sprite3\" / \"9dd59a4514b5373d4f665db78e145636.png\"\n", + "# ,SPRITE_DIR / \"Fox.sprite3\" / \"dd398ed81edb60c91ad4805f4437d2fa.png\"\n", + "# ,SPRITE_DIR / \"Frank.sprite3\" / \"10d39bb7e31647a465e747cd243b8cd0.png\"\n", + "# ,SPRITE_DIR / \"Frank.sprite3\" / \"26da9617218493f4f42a1592f21afee8.png\"\n", + "# ,SPRITE_DIR / \"Frank.sprite3\" / \"d16b76a634f7367ce7d6112401a78e57.png\"\n", + "# ,SPRITE_DIR / \"Frank.sprite3\" / \"e56e930cc0229d1042a673e7503209c5.png\"\n", + "# ,SPRITE_DIR / \"Frog 2 .sprite3\" / \"0717f446c991aac7df2fe4d6590354e7.png\"\n", + "# ,SPRITE_DIR / \"Frog 2 .sprite3\" / \"d9f69469090784d8dd68d94c0fd78a50.png\"\n", + "# 
,SPRITE_DIR / \"Frog 2 .sprite3\" / \"f2246c13e4540472c484119bc314d954.png\"\n", + "# ,SPRITE_DIR / \"Frog.sprite3\" / \"390845c11df0924f3b627bafeb3f814e.png\"\n", + "# ,SPRITE_DIR / \"Fruit Platter.sprite3\" / \"6c3252378da3334f63eebddbed3fae91.png\"\n", + "# ,SPRITE_DIR / \"Fruit Salad.sprite3\" / \"2e6ef315101433b78e38719e8cc630c2.png\"\n", + "# ,SPRITE_DIR / \"Ghost.sprite3\" / \"40ba3a0b5b3899a655fd8867229d4ee3.png\"\n", + "# ,SPRITE_DIR / \"Ghost.sprite3\" / \"634744e3f98bee53e9cb477a63aa9b21.png\"\n", + "# ,SPRITE_DIR / \"Ghost.sprite3\" / \"d1d89391f1d9c74557e504456d58a002.png\"\n", + "# ,SPRITE_DIR / \"Ghost.sprite3\" / \"f522b08c5757569ad289d67bce290cd0.png\"\n", + "# ,SPRITE_DIR / \"Gift.sprite3\" / \"0fdd104de718c5fc4a65da429468bdbd.png\"\n", + "# ,SPRITE_DIR / \"Gift.sprite3\" / \"6cbeda5d391c6d107f0b853222f344d9.png\"\n", + "# ,SPRITE_DIR / \"Giga Walking.sprite3\" / \"3afad833094d8dff1c4ff79edcaa13d0.png\"\n", + "# ,SPRITE_DIR / \"Giga Walking.sprite3\" / \"d27716e022fb5f747d7b09fe6eeeca06.png\"\n", + "# ,SPRITE_DIR / \"Giga Walking.sprite3\" / \"db55131bf54f96e8986d9b30730e42ce.png\"\n", + "# ,SPRITE_DIR / \"Giga.sprite3\" / \"337b338b2b10176221e638ac537854e6.png\"\n", + "# ,SPRITE_DIR / \"Giga.sprite3\" / \"92161a11e851ecda94cbbb985018fed6.png\"\n", + "# ,SPRITE_DIR / \"Giga.sprite3\" / \"bc706a7648342aaacac9050378b40c43.png\"\n", + "# ,SPRITE_DIR / \"Giga.sprite3\" / \"db15886cfdcb5e2f4459e9074e3990a1.png\"\n", + "# ,SPRITE_DIR / \"Giraffe.sprite3\" / \"43e89629fb9df7051eaf307c695424fc.png\"\n", + "# ,SPRITE_DIR / \"Giraffe.sprite3\" / \"cfd93a103479993aee4d680655e39d8d.png\"\n", + "# ,SPRITE_DIR / \"Giraffe.sprite3\" / \"ef1fca2ae13d49d9dd2c6cfc211a687c.png\"\n", + "# ,SPRITE_DIR / \"Glass Water.sprite3\" / \"ca70c69ef1f797d353581a3f76116ae3.png\"\n", + "# ,SPRITE_DIR / \"Glass Water.sprite3\" / \"cbf21cf1b057852f91135d27ebbf11ce.png\"\n", + "# ,SPRITE_DIR / \"Glasses.sprite3\" / \"705035328ac53d5ce1aa5a1ed1c2d172.png\"\n", + "# ,SPRITE_DIR / \"Glasses.sprite3\" / \"9e2f75d3a09f3f10d554ba8380c3ae52.png\"\n", + "# ,SPRITE_DIR / \"Glasses.sprite3\" / \"acd85b36e6b8d93ba4194ee2ea334207.png\"\n", + "# ,SPRITE_DIR / \"Glasses.sprite3\" / \"f2a02d0e7431147b8a4a282e02a8e6a4.png\"\n", + "# ,SPRITE_DIR / \"Glow-0.sprite3\" / \"64b59074f24d0e2405a509a45c0dadba.png\"\n", + "# ,SPRITE_DIR / \"Glow-1.sprite3\" / \"9f75c26aa6c56168a3e5a4f598de2c94.png\"\n", + "# ,SPRITE_DIR / \"Glow-2.sprite3\" / \"e8d8bf59db37b5012dd643a16a636042.png\"\n", + "# ,SPRITE_DIR / \"Glow-3.sprite3\" / \"57f7afe3b9888cca56803b73a62e4227.png\"\n", + "# ,SPRITE_DIR / \"Glow-4.sprite3\" / \"b8209e1980475b30ff11e60d7633446d.png\"\n", + "# ,SPRITE_DIR / \"Glow-5.sprite3\" / \"aacb5b3cec637f192f080138b4ccd8d2.png\"\n", + "# ,SPRITE_DIR / \"Glow-6.sprite3\" / \"84d9f26050c709e6b98706c22d2efb3d.png\"\n", + "# ,SPRITE_DIR / \"Glow-7.sprite3\" / \"6194b9a251a905d0001a969990961724.png\"\n", + "# ,SPRITE_DIR / \"Glow-8.sprite3\" / \"55e95fb9c60fbebb7d20bba99c7e9609.png\"\n", + "# ,SPRITE_DIR / \"Glow-9.sprite3\" / \"0f53ee6a988bda07cba561d38bfbc36f.png\"\n", + "# ,SPRITE_DIR / \"Glow-A.sprite3\" / \"fd470938cce54248aaf240b16e845456.png\"\n", + "# ,SPRITE_DIR / \"Glow-B.sprite3\" / \"a699fa024889b681d8b8b6c5c86acb6d.png\"\n", + "# ,SPRITE_DIR / \"Glow-C.sprite3\" / \"51b8a7dd7a8cddc5bc30e35824cc557a.png\"\n", + "# ,SPRITE_DIR / \"Glow-D.sprite3\" / \"a3a66e37de8d7ebe0505594e036ef6d1.png\"\n", + "# ,SPRITE_DIR / \"Glow-E.sprite3\" / \"80382a5db3fa556276068165c547b432.png\"\n", + "# ,SPRITE_DIR / \"Glow-F.sprite3\" / 
\"67239f7d47f7b92bc38e2d8b275d54ab.png\"\n", + "# ,SPRITE_DIR / \"Glow-G.sprite3\" / \"56839bc48957869d980c6f9b6f5a2a91.png\"\n", + "# ,SPRITE_DIR / \"Glow-H.sprite3\" / \"d6016c6494153cd5735ee4b6a1b05277.png\"\n", + "# ,SPRITE_DIR / \"Glow-I.sprite3\" / \"9077988af075c80cc403b1d6e5891528.png\"\n", + "# ,SPRITE_DIR / \"Glow-J.sprite3\" / \"6c359eff57abf5bb6db55894d08757c3.png\"\n", + "# ,SPRITE_DIR / \"Glow-K.sprite3\" / \"e932898d1e6fe3950a266fccaba0c3e6.png\"\n", + "# ,SPRITE_DIR / \"Glow-L.sprite3\" / \"dcee9202cf20e0395971f1ee73c45d37.png\"\n", + "# ,SPRITE_DIR / \"Glow-M.sprite3\" / \"26f81aa5990bf2371acaa8d76fe1e87f.png\"\n", + "# ,SPRITE_DIR / \"Glow-N.sprite3\" / \"d55a04ada14958eccc4aef446a4dad57.png\"\n", + "# ,SPRITE_DIR / \"Glow-O.sprite3\" / \"64b59074f24d0e2405a509a45c0dadba.png\"\n", + "# ,SPRITE_DIR / \"Glow-P.sprite3\" / \"c6edc2603ad4db3aa0b29f80e3e38cff.png\"\n", + "# ,SPRITE_DIR / \"Glow-Q.sprite3\" / \"e4ae18bf8b92ae375ce818d754588c76.png\"\n", + "# ,SPRITE_DIR / \"Glow-R.sprite3\" / \"bb11b49e19c68452331e78d51081ab42.png\"\n", + "# ,SPRITE_DIR / \"Glow-S.sprite3\" / \"6fd994b41bcf776fbf1f1521a879f1af.png\"\n", + "# ,SPRITE_DIR / \"Glow-T.sprite3\" / \"d687543649a676a14f408b5890d45f05.png\"\n", + "# ,SPRITE_DIR / \"Glow-U.sprite3\" / \"cb8ef2244400a57ba08e918cb4fe8bba.png\"\n", + "# ,SPRITE_DIR / \"Glow-V.sprite3\" / \"c6edc1ac2c5979f389598537cfb28096.png\"\n", + "# ,SPRITE_DIR / \"Glow-W.sprite3\" / \"2e0c2bb46c4ca3cf97779f749b1556f6.png\"\n", + "# ,SPRITE_DIR / \"Glow-X.sprite3\" / \"0b98a63dcc55251072a95a6c6bf7f6f2.png\"\n", + "# ,SPRITE_DIR / \"Glow-Y.sprite3\" / \"532494c9b5e6709f9982c00a48ce6870.png\"\n", + "# ,SPRITE_DIR / \"Glow-Z.sprite3\" / \"2d94d83dcc9ee3a107e5ea7ef0dddeb0.png\"\n", + "# ,SPRITE_DIR / \"Goalie.sprite3\" / \"59eedd0a23c3c983d386a0c125991c7f.png\"\n", + "# ,SPRITE_DIR / \"Goalie.sprite3\" / \"63f2955298d59dd22dc7b7c6a9c521e2.png\"\n", + "# ,SPRITE_DIR / \"Goalie.sprite3\" / \"a554f2a9b49a09ec67d1fd7ecfbcddcd.png\"\n", + "# ,SPRITE_DIR / \"Goalie.sprite3\" / \"eb096e2b4234f5f8ee1f2c44429eaa1a.png\"\n", + "# ,SPRITE_DIR / \"Goalie.sprite3\" / \"f2e7ba53f3a28c4359cb0d3e3cb4001a.png\"\n", + "# ,SPRITE_DIR / \"Goblin.sprite3\" / \"2add9ef4eaa25f8915406dcfd8bafc9f.png\"\n", + "# ,SPRITE_DIR / \"Goblin.sprite3\" / \"3f08380f25062b8055a1800f5dad14bd.png\"\n", + "# ,SPRITE_DIR / \"Goblin.sprite3\" / \"afb9fe328adae617ee3375366fca02e7.png\"\n", + "# ,SPRITE_DIR / \"Goblin.sprite3\" / \"b8604b8039d6b633015aaf17d74d5d5b.png\"\n", + "# ,SPRITE_DIR / \"Gobo.sprite3\" / \"5c0896569305ab177d87caa31aad2a72.png\"\n", + "# ,SPRITE_DIR / \"Gobo.sprite3\" / \"9d8021c216fb92cc708e1e96f3ed2b52.png\"\n", + "# ,SPRITE_DIR / \"Gobo.sprite3\" / \"f505a4e9eab5e40e2669a4462dba4c90.png\"\n", + "# ,SPRITE_DIR / \"Grasshopper.sprite3\" / \"529644c5ecdca63adafd87777e341ad7.png\"\n", + "# ,SPRITE_DIR / \"Grasshopper.sprite3\" / \"93550d8abde130ad149904c4448f8b65.png\"\n", + "# ,SPRITE_DIR / \"Grasshopper.sprite3\" / \"a7c638b8aa86f2a758830f8c2b0e4cf5.png\"\n", + "# ,SPRITE_DIR / \"Grasshopper.sprite3\" / \"cf2ac769df444137b4c1eec472fa4b92.png\"\n", + "# ,SPRITE_DIR / \"Grasshopper.sprite3\" / \"d4f3dfe69be6537e73544381408a820d.png\"\n", + "# ,SPRITE_DIR / \"Grasshopper.sprite3\" / \"e7210a370837dd1e4ebc1a56a973b7f6.png\"\n", + "# ,SPRITE_DIR / \"Green Flag.sprite3\" / \"2bbfd072183a67db5eddb923fe0726b3.png\"\n", + "# ,SPRITE_DIR / \"Griffin.sprite3\" / \"102f6200c13bd60afa9538c712776fb0.png\"\n", + "# ,SPRITE_DIR / \"Griffin.sprite3\" / 
\"157d3665cebcd41fa814b9217af99476.png\"\n", + "# ,SPRITE_DIR / \"Griffin.sprite3\" / \"a31166d45903206b52cb0f0a0cb687b5.png\"\n", + "# ,SPRITE_DIR / \"Griffin.sprite3\" / \"b8c8745820a341afec08e77f4a254551.png\"\n", + "# ,SPRITE_DIR / \"Guitar-electric1.sprite3\" / \"57c6d7dc148576cb2f36e53dea49260a.png\"\n", + "# ,SPRITE_DIR / \"Guitar-electric1.sprite3\" / \"677aed0b1168caf4b3ec565b9104dbe0.png\"\n", + "# ,SPRITE_DIR / \"Guitar-electric2.sprite3\" / \"83db2d0e342257e534ccdf0ec17bf668.png\"\n", + "# ,SPRITE_DIR / \"Guitar-electric2.sprite3\" / \"bb88e6a8a08a4034cc155b1137743ca1.png\"\n", + "# ,SPRITE_DIR / \"Guitar.sprite3\" / \"8704489dcf1a3ca93c5db40ebe5acd38.png\"\n", + "# ,SPRITE_DIR / \"Guitar.sprite3\" / \"e0423f4743f39456dade16fa1223d6b0.png\"\n", + "# ,SPRITE_DIR / \"Hannah.sprite3\" / \"5fdce07935156bbcf943793fa84e826c.png\"\n", + "# ,SPRITE_DIR / \"Hannah.sprite3\" / \"b983d99560313e38b4b3cd36cbd5f0d1.png\"\n", + "# ,SPRITE_DIR / \"Hannah.sprite3\" / \"d0c3b4b24fbf1152de3ebb68f6b875ae.png\"\n", + "# ,SPRITE_DIR / \"Hare.sprite3\" / \"7269593d83b6f9eae512997f541a7417.png\"\n", + "# ,SPRITE_DIR / \"Hare.sprite3\" / \"85a3b8c151e10576fa531a4293fdac00.png\"\n", + "# ,SPRITE_DIR / \"Hare.sprite3\" / \"c8dbb4302dd489a201938c203018c2f0.png\"\n", + "# ,SPRITE_DIR / \"Harper.sprite3\" / \"3a0973a042ee16e816c568651316d5d4.png\"\n", + "# ,SPRITE_DIR / \"Harper.sprite3\" / \"98ce6e6bb99f8ba116f127fdf2e739fd.png\"\n", + "# ,SPRITE_DIR / \"Harper.sprite3\" / \"e407fa0ed992393d12d0a108c11e2fa6.png\"\n", + "# ,SPRITE_DIR / \"Hat1 .sprite3\" / \"0aed53a86d92ec2283068000ac97a60b.png\"\n", + "# ,SPRITE_DIR / \"Hat1 .sprite3\" / \"13e382ae3f05a9a23e0b64ca23230438.png\"\n", + "# ,SPRITE_DIR / \"Hat1 .sprite3\" / \"6349e36da9897a2f89bdbf5c77dbdacb.png\"\n", + "# ,SPRITE_DIR / \"Hat1 .sprite3\" / \"c632719725400c604fcadf0858ce2b2c.png\"\n", + "# ,SPRITE_DIR / \"Hatchling.sprite3\" / \"0e5c295a043d5e183a98046e4f734b72.png\"\n", + "# ,SPRITE_DIR / \"Hatchling.sprite3\" / \"55f7d457eb0af78cb309ca47497c490f.png\"\n", + "# ,SPRITE_DIR / \"Hatchling.sprite3\" / \"f27d557be70a9522fae4392bfd4f5249.png\"\n", + "# ,SPRITE_DIR / \"Heart Candy.sprite3\" / \"288976865e8c5db717d859e915606d82.png\"\n", + "# ,SPRITE_DIR / \"Heart Candy.sprite3\" / \"3ee430ba825f41ae9913453d4932fb8b.png\"\n", + "# ,SPRITE_DIR / \"Heart Candy.sprite3\" / \"51248e76be2aa7a0f0ed77bc94af1b3a.png\"\n", + "# ,SPRITE_DIR / \"Heart Candy.sprite3\" / \"5fa8c4693cf8cba8cdbcbed72f4f58aa.png\"\n", + "# ,SPRITE_DIR / \"Heart Face.sprite3\" / \"989770846f8cd1628b48bbe91d0a7d0d.png\"\n", + "# ,SPRITE_DIR / \"Heart.sprite3\" / \"c77e640f6e023e7ce1e376da0f26e1eb.png\"\n", + "# ,SPRITE_DIR / \"Heart.sprite3\" / \"e24731f5cf2759c2f289921bebb86ea2.png\"\n", + "# ,SPRITE_DIR / \"Hedgehog.sprite3\" / \"1fcbba4a2252e96c52d2d8aa8e593e51.png\"\n", + "# ,SPRITE_DIR / \"Hedgehog.sprite3\" / \"3251533232e7f44315512149c7f76214.png\"\n", + "# ,SPRITE_DIR / \"Hedgehog.sprite3\" / \"3b0e1717859808cecf1a45e2a32dc201.png\"\n", + "# ,SPRITE_DIR / \"Hedgehog.sprite3\" / \"42bac40ca828133600e0a9f7ba019adb.png\"\n", + "# ,SPRITE_DIR / \"Hedgehog.sprite3\" / \"93c2d7a0abefaf26ee50d5038ac5bf61.png\"\n", + "# ,SPRITE_DIR / \"Hen.sprite3\" / \"6c9e05f568862dbcea0a1652a210239b.png\"\n", + "# ,SPRITE_DIR / \"Hen.sprite3\" / \"b02a33e32313cc9a75781a6fafd07033.png\"\n", + "# ,SPRITE_DIR / \"Hen.sprite3\" / \"c9a4570a2d0ae09b9feeeb5607e4b9c7.png\"\n", + "# ,SPRITE_DIR / \"Hen.sprite3\" / \"d055896a473bb12f4ec67af1fdb9c652.png\"\n", + "# ,SPRITE_DIR / \"Hippo1.sprite3\" / 
\"5764a2c650f225bc27cc0e6c5db401ea.png\"\n", + "# ,SPRITE_DIR / \"Hippo1.sprite3\" / \"911901dc568b56c15fe81819bc2af653.png\"\n", + "# ,SPRITE_DIR / \"Home Button.sprite3\" / \"1ebdcb9f033fa6658259b52da376b7ac.png\"\n", + "# ,SPRITE_DIR / \"Horse.sprite3\" / \"0e0fa871bea01c2dfb70e9955dc098be.png\"\n", + "# ,SPRITE_DIR / \"Horse.sprite3\" / \"ad458251c5bf5b375870829f1762fa47.png\"\n", + "# ,SPRITE_DIR / \"Jaime.sprite3\" / \"3ddc912edef87ae29121f57294fa0cb5.png\"\n", + "# ,SPRITE_DIR / \"Jaime.sprite3\" / \"4b9d2162e30dbb924840575ed35fddb0.png\"\n", + "# ,SPRITE_DIR / \"Jaime.sprite3\" / \"5883bdefba451aaeac8d77c798d41eb0.png\"\n", + "# ,SPRITE_DIR / \"Jaime.sprite3\" / \"5a683f4536abca0f83a77bc341df4c9a.png\"\n", + "# ,SPRITE_DIR / \"Jaime.sprite3\" / \"63e56d28cc3e3d9b735e1f1d51248cc0.png\"\n", + "# ,SPRITE_DIR / \"Jaime.sprite3\" / \"7fb579a98d6db257f1b16109d3c4609a.png\"\n", + "# ,SPRITE_DIR / \"Jaime.sprite3\" / \"d6cc9814f7a6640e4c2b1a4276987dc5.png\"\n", + "# ,SPRITE_DIR / \"Jamal.sprite3\" / \"2408318e743873c7254db1623441b9c5.png\"\n", + "# ,SPRITE_DIR / \"Jamal.sprite3\" / \"3c8d5e688450ad1e6bf024a32c55bcda.png\"\n", + "# ,SPRITE_DIR / \"Jamal.sprite3\" / \"693748d763c8da4b119a5e4bee6a1768.png\"\n", + "# ,SPRITE_DIR / \"Jamal.sprite3\" / \"92692e0c0f376797274392484ba74133.png\"\n", + "# ,SPRITE_DIR / \"Jar.sprite3\" / \"33b537168f3c2eb3dafeb739c22f38a6.png\"\n", + "# ,SPRITE_DIR / \"Jar.sprite3\" / \"e0f5ac773987470ff2467e3e01b9ab23.png\"\n", + "# ,SPRITE_DIR / \"Jellyfish.sprite3\" / \"00c99df84f8385038461d6c42a5465ab.png\"\n", + "# ,SPRITE_DIR / \"Jellyfish.sprite3\" / \"3158299771b3d34ed2c50a00fbab715e.png\"\n", + "# ,SPRITE_DIR / \"Jellyfish.sprite3\" / \"4e259b7c08f05145fc7800b33e4f356e.png\"\n", + "# ,SPRITE_DIR / \"Jellyfish.sprite3\" / \"5944a1e687fa31589517825b2144a17b.png\"\n", + "# ,SPRITE_DIR / \"Jordyn.sprite3\" / \"00c8c464c19460df693f8d5ae69afdab.png\"\n", + "# ,SPRITE_DIR / \"Jordyn.sprite3\" / \"768c4601174f0dfcb96b3080ccc3a192.png\"\n", + "# ,SPRITE_DIR / \"Jordyn.sprite3\" / \"a7cc1e5f02b58ecc8095cfc18eef0289.png\"\n", + "# ,SPRITE_DIR / \"Jordyn.sprite3\" / \"db4d97cbf24e2b8af665bfbf06f67fa0.png\"\n", + "# ,SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"01dd2f553c7262329ebaba2516e3a2b1.png\"\n", + "# ,SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"0ed4a09c41871d150c51119c1bceded2.png\"\n", + "# ,SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"2e2a6534d33883fdd2f8471a1adbebb7.png\"\n", + "# ,SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"3d3ea804243800981acabc7caba10939.png\"\n", + "# ,SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"6f68790ee3eb9bdccf8749305186b0dd.png\"\n", + "# ,SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"75ee2383fd83992b401c8a0730521d94.png\"\n", + "# ,SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"a12f40b18067bb31746f9cf461de88aa.png\"\n", + "# ,SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"a55fbb529c10f70bcb374aef8a63571b.png\"\n", + "# ,SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"a9fbc01a4124d555da12630312e46197.png\"\n", + "# ,SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"aabfedff0d11243386b6b0941e0f72e9.png\"\n", + "# ,SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"c2d5519e8a0f2214ff757117038c28dc.png\"\n", + "# ,SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"ea812b4c2b2405aa2b73158023298f71.png\"\n", + "# ,SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"efaa8eb6c8cf7dc35d4d37d546ebd333.png\"\n", + "# ,SPRITE_DIR / \"Kai.sprite3\" / \"6e007fde15e49c66ee7996561f80b452.png\"\n", + "# ,SPRITE_DIR / \"Kai.sprite3\" / \"c1e1149f6d7e308e3e4eba14ccc8a751.png\"\n", + "# ,SPRITE_DIR / \"Key.sprite3\" / 
\"680d3e4dce002f922b32447fcf29743d.png\"\n", + "# ,SPRITE_DIR / \"Keyboard.sprite3\" / \"0ad880b5e829578832c8927b3f6ef7f8.png\"\n", + "# ,SPRITE_DIR / \"Keyboard.sprite3\" / \"6efd23c91dab070526feacdf72e2d3da.png\"\n", + "# ,SPRITE_DIR / \"Kia.sprite3\" / \"b3d0a248adbc26b0d0826e042a81670a.png\"\n", + "# ,SPRITE_DIR / \"Kia.sprite3\" / \"db6cd6b145bb6d8dc299475af7423d6e.png\"\n", + "# ,SPRITE_DIR / \"Kia.sprite3\" / \"e56e480c994572323d88355b8733e1a3.png\"\n", + "# ,SPRITE_DIR / \"Kiran.sprite3\" / \"2928e9fbd5ca08e326192b3a41bea691.png\"\n", + "# ,SPRITE_DIR / \"Kiran.sprite3\" / \"78bd6de23d4929aef678ddf0f3f5c276.png\"\n", + "# ,SPRITE_DIR / \"Kiran.sprite3\" / \"7912b6f378bd781f62683e003c574dbe.png\"\n", + "# ,SPRITE_DIR / \"Kiran.sprite3\" / \"7c0bedab5404830a5147cc4a2d46e997.png\"\n", + "# ,SPRITE_DIR / \"Kiran.sprite3\" / \"7f0bc123819fc2666321b6cd38069bdb.png\"\n", + "# ,SPRITE_DIR / \"Kiran.sprite3\" / \"b0566e0eed7b5216b92d61468d21ecee.png\"\n", + "# ,SPRITE_DIR / \"Knight.sprite3\" / \"188325c56b79ff3cd58497c970ba87a6.png\"\n", + "# ,SPRITE_DIR / \"Ladybug1.sprite3\" / \"169c0efa8c094fdedddf8c19c36f0229.png\"\n", + "# ,SPRITE_DIR / \"Ladybug2.sprite3\" / \"3f48228829b77fc47d6d89b5729b2957.png\"\n", + "# ,SPRITE_DIR / \"Ladybug2.sprite3\" / \"457200f8dec8fea00d22473e9bd9175e.png\"\n", + "# ,SPRITE_DIR / \"Laptop.sprite3\" / \"cd2d1f72275e676df5f82be74ae91dfa.png\"\n", + "# ,SPRITE_DIR / \"LB Dance.sprite3\" / \"0725440743391e7c622bb5df6a94e1d4.png\"\n", + "# ,SPRITE_DIR / \"LB Dance.sprite3\" / \"0a2461b3b9a4b8603e75565d78b1d4d7.png\"\n", + "# ,SPRITE_DIR / \"LB Dance.sprite3\" / \"4423159d81378ada5ffd7f053d7ef471.png\"\n", + "# ,SPRITE_DIR / \"LB Dance.sprite3\" / \"525285312925e1e6b4e237a119b61305.png\"\n", + "# ,SPRITE_DIR / \"LB Dance.sprite3\" / \"563f86443cb102b9241cebb62eb2d81a.png\"\n", + "# ,SPRITE_DIR / \"LB Dance.sprite3\" / \"5f176ef763be18f7c342dc2e2de7bf16.png\"\n", + "# ,SPRITE_DIR / \"LB Dance.sprite3\" / \"63d099e94aa8a973dcfa4c5d8b4a3e7a.png\"\n", + "# ,SPRITE_DIR / \"LB Dance.sprite3\" / \"645d6e2674452009df7a9a844a604791.png\"\n", + "# ,SPRITE_DIR / \"LB Dance.sprite3\" / \"71dde8c43985815bffb5a5ed5632af58.png\"\n", + "# ,SPRITE_DIR / \"LB Dance.sprite3\" / \"79ca528d13ffb557a236f0a35a0eb486.png\"\n", + "# ,SPRITE_DIR / \"LB Dance.sprite3\" / \"b508808c087adb55ce156f5cfbdac61b.png\"\n", + "# ,SPRITE_DIR / \"LB Dance.sprite3\" / \"cdd52259075b75628001672d375e4985.png\"\n", + "# ,SPRITE_DIR / \"LB Dance.sprite3\" / \"e68d899e178309ff3eae3e1de8a8ec28.png\"\n", + "# ,SPRITE_DIR / \"Lightning.sprite3\" / \"0ddd3a05a330925bcd2d048908ed40b8.png\"\n", + "# ,SPRITE_DIR / \"Line.sprite3\" / \"e85305b47cfd92d971704dcb7ad6e17b.png\"\n", + "# ,SPRITE_DIR / \"Lion.sprite3\" / \"91c64c5361d906fd36d5813ae27b85a8.png\"\n", + "# ,SPRITE_DIR / \"Lion.sprite3\" / \"e88e83c8b3ca80c54540b5f0c5a0cc03.png\"\n", + "# ,SPRITE_DIR / \"Lion.sprite3\" / \"f0d9ab3d82bbade6e279dc1c81e2e6db.png\"\n", + "# ,SPRITE_DIR / \"Llama.sprite3\" / \"1f3aaeb598e121ad817143800d8c4a32.png\"\n", + "# ,SPRITE_DIR / \"Llama.sprite3\" / \"ac80d75745315f052f7f7b4e62e4a850.png\"\n", + "# ,SPRITE_DIR / \"Llama.sprite3\" / \"c97824f20a45adfa3ff362f82247a025.png\"\n", + "# ,SPRITE_DIR / \"Luca.sprite3\" / \"18dfad514602a4907502c7c84861b24e.png\"\n", + "# ,SPRITE_DIR / \"Luca.sprite3\" / \"90fa2ad340edc6e6ba963710feef940e.png\"\n", + "# ,SPRITE_DIR / \"Luca.sprite3\" / \"963cb82687acaf5de53a22b287192723.png\"\n", + "# ,SPRITE_DIR / \"Magic Wand.sprite3\" / \"89aa5332042d7bbf8368293a4efeafa4.png\"\n", + "# 
,SPRITE_DIR / \"Marian.sprite3\" / \"16893c6136292ae36e13dc72cc55719b.png\"\n", + "# ,SPRITE_DIR / \"Marian.sprite3\" / \"221e9999b20ecc21b37c68fcdf09ab02.png\"\n", + "# ,SPRITE_DIR / \"Marian.sprite3\" / \"3d2ecee35eab8c37d1c3eadfe50ce447.png\"\n", + "# ,SPRITE_DIR / \"Marian.sprite3\" / \"64206b46c411e40926569cf3f5e587be.png\"\n", + "# ,SPRITE_DIR / \"Marian.sprite3\" / \"e9577a1eb098905dd386135bb38c0398.png\"\n", + "# ,SPRITE_DIR / \"Max.sprite3\" / \"5180649cfd62831c52f8994ce644d6ac.png\"\n", + "# ,SPRITE_DIR / \"Max.sprite3\" / \"6b91183a4ad162e4950d95828a85144d.png\"\n", + "# ,SPRITE_DIR / \"Max.sprite3\" / \"7b3d1324382032f87384ef2c8c618156.png\"\n", + "# ,SPRITE_DIR / \"Max.sprite3\" / \"9669ce16eb6c6df6f26686598a59711d.png\"\n", + "# ,SPRITE_DIR / \"Mermaid.sprite3\" / \"2a6274017350fab67ebec9157420ae96.png\"\n", + "# ,SPRITE_DIR / \"Mermaid.sprite3\" / \"65419296861b1c7ee59075af0f949d67.png\"\n", + "# ,SPRITE_DIR / \"Mermaid.sprite3\" / \"88a3b6b2f0b3ffa25cab97bc619f8386.png\"\n", + "# ,SPRITE_DIR / \"Mermaid.sprite3\" / \"f903049308e2171178d889f5c4a7d466.png\"\n", + "# ,SPRITE_DIR / \"Microphone.sprite3\" / \"c96578ffb9e314fee097862d69fde0af.png\"\n", + "# ,SPRITE_DIR / \"Microphone.sprite3\" / \"d4d80e94e2cc759b8ca1d7b58f2a9052.png\"\n", + "# ,SPRITE_DIR / \"Milk.sprite3\" / \"0f683f65c737bbcbb916df0895d8436e.png\"\n", + "# ,SPRITE_DIR / \"Milk.sprite3\" / \"1fa49d62f8028a375470e7bac451e666.png\"\n", + "# ,SPRITE_DIR / \"Milk.sprite3\" / \"4d3eabd3ef848b61c3120d796c274733.png\"\n", + "# ,SPRITE_DIR / \"Milk.sprite3\" / \"6ec300ae45758eff12e9d47cf4f0d2a0.png\"\n", + "# ,SPRITE_DIR / \"Milk.sprite3\" / \"aa5f1501805aa68d3ad74623f59e6135.png\"\n", + "# ,SPRITE_DIR / \"Monet.sprite3\" / \"137bbc522701a96908667d1b1730d041.png\"\n", + "# ,SPRITE_DIR / \"Monet.sprite3\" / \"138e6591f3317222521963ef3ce9a057.png\"\n", + "# ,SPRITE_DIR / \"Monet.sprite3\" / \"4c6b016c55c4348b6dce29ba99e7ede4.png\"\n", + "# ,SPRITE_DIR / \"Monet.sprite3\" / \"5b67cb843dcc9dabdc580b9e35e95659.png\"\n", + "# ,SPRITE_DIR / \"Monet.sprite3\" / \"740276a8aa9ddd12dd4b30f369975d66.png\"\n", + "# ,SPRITE_DIR / \"Monkey.sprite3\" / \"254926ee81bfa82f2db7009a80635061.png\"\n", + "# ,SPRITE_DIR / \"Monkey.sprite3\" / \"de0405b0576ade1282bdfcd198922baa.png\"\n", + "# ,SPRITE_DIR / \"Monkey.sprite3\" / \"ec6d62f0ff64bb5440ffdc662b6e46fa.png\"\n", + "# ,SPRITE_DIR / \"Motorcycle.sprite3\" / \"6e960b3c6a60ebe192e36b235c50ae03.png\"\n", + "# ,SPRITE_DIR / \"Motorcycle.sprite3\" / \"a70bdd403ace1f1ece2f2af0fbc3c720.png\"\n", + "# ,SPRITE_DIR / \"Motorcycle.sprite3\" / \"b73447c2577b8f77b5e2eb1da6d6445a.png\"\n", + "# ,SPRITE_DIR / \"Motorcycle.sprite3\" / \"c6f8179ff3e8f8ab08b01d50343eefc4.png\"\n", + "# ,SPRITE_DIR / \"Mouse1.sprite3\" / \"8a7da35c473972f88896ca73b7df2188.png\"\n", + "# ,SPRITE_DIR / \"Mouse1.sprite3\" / \"c5f76b65e30075c12d49ea8a8f7d6bad.png\"\n", + "# ,SPRITE_DIR / \"Muffin.sprite3\" / \"afa34381db44e699d61f774911aab448.png\"\n", + "# ,SPRITE_DIR / \"Muffin.sprite3\" / \"bd0581902cd6cc13888520776bf1620c.png\"\n", + "# ,SPRITE_DIR / \"Nano.sprite3\" / \"8f2f4a70e87262ef478ce60567b6208a.png\"\n", + "# ,SPRITE_DIR / \"Nano.sprite3\" / \"a4e2034751fa650fd5fd69432c110104.png\"\n", + "# ,SPRITE_DIR / \"Nano.sprite3\" / \"a62e560863c0e49b12e5d57e13d084f1.png\"\n", + "# ,SPRITE_DIR / \"Nano.sprite3\" / \"d12aead3e3c2917e7eba8b2b90a7afd2.png\"\n", + "# ,SPRITE_DIR / \"Neigh Pony.sprite3\" / \"592816f56409d582603c485cbefcbbb8.png\"\n", + "# ,SPRITE_DIR / \"Noor.sprite3\" / 
\"4cf233c6540e434aded60608ba316ce3.png\"\n", + "# ,SPRITE_DIR / \"Noor.sprite3\" / \"975585ca9461f0730a285fc96df73425.png\"\n", + "# ,SPRITE_DIR / \"Noor.sprite3\" / \"c1792bbd5970034b4595ff7e742d6e47.png\"\n", + "# ,SPRITE_DIR / \"Octopus.sprite3\" / \"5d6e17d6260134d0402ba487a419d7c3.png\"\n", + "# ,SPRITE_DIR / \"Octopus.sprite3\" / \"7d33a531087188b29deae879f23f76bc.png\"\n", + "# ,SPRITE_DIR / \"Octopus.sprite3\" / \"9b5a2cd287229bf36ffcc176ed72cc0c.png\"\n", + "# ,SPRITE_DIR / \"Octopus.sprite3\" / \"e22d9b633feffc1d026980a1f21e07d7.png\"\n", + "# ,SPRITE_DIR / \"Octopus.sprite3\" / \"f582f162c4438d82c9e2a0a87a3e02ce.png\"\n", + "# ,SPRITE_DIR / \"Orange.sprite3\" / \"d0a55aae1decb57152b454c9a5226757.png\"\n", + "# ,SPRITE_DIR / \"Orange2.sprite3\" / \"27286ca08451bc512e1d611965dad061.png\"\n", + "# ,SPRITE_DIR / \"Orange2.sprite3\" / \"b823f73a31e61fd362574e2c24dfc0c2.png\"\n", + "# ,SPRITE_DIR / \"Outfielder.sprite3\" / \"10578b06f97b9fdc34f622e9e682c144.png\"\n", + "# ,SPRITE_DIR / \"Outfielder.sprite3\" / \"175ddc7ed99cc5b72909098046d8f558.png\"\n", + "# ,SPRITE_DIR / \"Outfielder.sprite3\" / \"9f31c772f88a5f32fe857d57b3bcb04c.png\"\n", + "# ,SPRITE_DIR / \"Outfielder.sprite3\" / \"d0a8837867d39444a824b734d4cd5554.png\"\n", + "# ,SPRITE_DIR / \"Owl.sprite3\" / \"236bb6b33e7db00834bcea89b03b8a5e.png\"\n", + "# ,SPRITE_DIR / \"Owl.sprite3\" / \"806139207066cb5eaef727d54c1bb4ec.png\"\n", + "# ,SPRITE_DIR / \"Owl.sprite3\" / \"a518f70b65ec489e709795209b43207a.png\"\n", + "# ,SPRITE_DIR / \"Paddle.sprite3\" / \"15864fac7d38bb94c1ec3a199de96c26.png\"\n", + "# ,SPRITE_DIR / \"Panther.sprite3\" / \"0e7c244f54b27058f8b17d9e0d3cee12.png\"\n", + "# ,SPRITE_DIR / \"Panther.sprite3\" / \"4a762fd04901407544d8858adac2b3fa.png\"\n", + "# ,SPRITE_DIR / \"Panther.sprite3\" / \"a7aee991f51636574625c1300f035bdd.png\"\n", + "# ,SPRITE_DIR / \"Pants.sprite3\" / \"ac9c7259873e472c2c1a99339c694f16.png\"\n", + "# ,SPRITE_DIR / \"Pants.sprite3\" / \"ef8b1576f183222a4c2d373a7bc194cc.png\"\n", + "# ,SPRITE_DIR / \"Parrot.sprite3\" / \"036fad20b674197358f8c0b2dc64e17e.png\"\n", + "# ,SPRITE_DIR / \"Parrot.sprite3\" / \"082f371c206f07d20e53595a9c69cc22.png\"\n", + "# ,SPRITE_DIR / \"Party Hats.sprite3\" / \"1d14be44e4aa99a471115cd874204690.png\"\n", + "# ,SPRITE_DIR / \"Party Hats.sprite3\" / \"8b43413906cf1ba1343580d3ca062048.png\"\n", + "# ,SPRITE_DIR / \"Party Hats.sprite3\" / \"abefb98344ece228afeb462f46d6b750.png\"\n", + "# ,SPRITE_DIR / \"Pencil.sprite3\" / \"b3d6eae85f285dd618bf9dcf609b9454.png\"\n", + "# ,SPRITE_DIR / \"Pencil.sprite3\" / \"f017876452a24d118fc0b1753caefad9.png\"\n", + "# ,SPRITE_DIR / \"Penguin 2.sprite3\" / \"280d2aa13f0c6774cc8828dc177aaf60.png\"\n", + "# ,SPRITE_DIR / \"Penguin 2.sprite3\" / \"428772307d90f4b347d6cc3c0d8e76ef.png\"\n", + "# ,SPRITE_DIR / \"Penguin 2.sprite3\" / \"780467f3d173dcb37fd65834841babc6.png\"\n", + "# ,SPRITE_DIR / \"Penguin 2.sprite3\" / \"d485f5620d2dde69a6aa1cda7c897d12.png\"\n", + "# ,SPRITE_DIR / \"Penguin.sprite3\" / \"6d11aedea7f316215aaa0d08617f4c31.png\"\n", + "# ,SPRITE_DIR / \"Penguin.sprite3\" / \"c434b674f2da18ba13cdfe51dbc05ecc.png\"\n", + "# ,SPRITE_DIR / \"Penguin.sprite3\" / \"dad5b0d82cb6e053d1ded2ef537a9453.png\"\n", + "# ,SPRITE_DIR / \"Pico Walking.sprite3\" / \"22fb16ae7cc18187a7adaf2852f07884.png\"\n", + "# ,SPRITE_DIR / \"Pico Walking.sprite3\" / \"52a60eccb624530fd3a24fc41fbad6e5.png\"\n", + "# ,SPRITE_DIR / \"Pico Walking.sprite3\" / \"702bd644d01ea8eda2ea122daeea7d74.png\"\n", + "# ,SPRITE_DIR / \"Pico Walking.sprite3\" / 
\"c8f58f31cabf4acabb3f828730061276.png\"\n", + "# ,SPRITE_DIR / \"Pico.sprite3\" / \"a7597b1f0c13455d335a3d4fe77da528.png\"\n", + "# ,SPRITE_DIR / \"Pico.sprite3\" / \"bcc0e8a5dda3a813608902b887c87bb4.png\"\n", + "# ,SPRITE_DIR / \"Pico.sprite3\" / \"d6dfa2efe58939af4c85755feb3c0375.png\"\n", + "# ,SPRITE_DIR / \"Pico.sprite3\" / \"e7ce31db37f7abd2901499db2e9ad83a.png\"\n", + "# ,SPRITE_DIR / \"Pitcher.sprite3\" / \"049132404cb2cb157830aaf18aee6a24.png\"\n", + "# ,SPRITE_DIR / \"Pitcher.sprite3\" / \"ae8aa57ce6e5729d30d8b785bec97774.png\"\n", + "# ,SPRITE_DIR / \"Pitcher.sprite3\" / \"bceae719ba1ec230afec56f14a1e4d52.png\"\n", + "# ,SPRITE_DIR / \"Pitcher.sprite3\" / \"fc955dec7f1e97f1ddd9f8245a80907e.png\"\n", + "# ,SPRITE_DIR / \"Planet2.sprite3\" / \"50cde8a4a737da0eba1ab73eb263f836.png\"\n", + "# ,SPRITE_DIR / \"Polar Bear.sprite3\" / \"11d00a06abd2c882672464f4867e90b6.png\"\n", + "# ,SPRITE_DIR / \"Polar Bear.sprite3\" / \"5d7cd81aad80100368b8b77bf09ad576.png\"\n", + "# ,SPRITE_DIR / \"Polar Bear.sprite3\" / \"d050a3394b61ade080f7963c40192e7d.png\"\n", + "# ,SPRITE_DIR / \"Potion.sprite3\" / \"0eceab4561534dde827bf68233f47441.png\"\n", + "# ,SPRITE_DIR / \"Potion.sprite3\" / \"d922ffdfe38fd30fd8787810c6bce318.png\"\n", + "# ,SPRITE_DIR / \"Potion.sprite3\" / \"f8500e9530bf1136c6386f2a329519dd.png\"\n", + "# ,SPRITE_DIR / \"Prince.sprite3\" / \"ada9c5ce11245c467c780bceb665c42d.png\"\n", + "# ,SPRITE_DIR / \"Princess.sprite3\" / \"0721f5238a2bcde49d05f72ca9d21d9b.png\"\n", + "# ,SPRITE_DIR / \"Princess.sprite3\" / \"23330150c0a09180083b597cbfeca99a.png\"\n", + "# ,SPRITE_DIR / \"Princess.sprite3\" / \"39157d5d3280ab0b273260170d5436c2.png\"\n", + "# ,SPRITE_DIR / \"Princess.sprite3\" / \"ba37f578cc6cabce6fe4d2864c9eb96f.png\"\n", + "# ,SPRITE_DIR / \"Princess.sprite3\" / \"e59f55c86ea557bdbd88302012ce8db5.png\"\n", + "# ,SPRITE_DIR / \"Pufferfish.sprite3\" / \"1b4f39763c9848cc840522b95cc6d8ae.png\"\n", + "# ,SPRITE_DIR / \"Pufferfish.sprite3\" / \"2266c6bb2c3a8fb80783518a08852b4a.png\"\n", + "# ,SPRITE_DIR / \"Pufferfish.sprite3\" / \"b8aa1bd46eacc054c695b89167c3ad28.png\"\n", + "# ,SPRITE_DIR / \"Pufferfish.sprite3\" / \"e73e71718306f6c7085305dba142c315.png\"\n", + "# ,SPRITE_DIR / \"Puppy.sprite3\" / \"05630bfa94501a3e5d61ce443a0cea70.png\"\n", + "# ,SPRITE_DIR / \"Puppy.sprite3\" / \"2768d9e44a0aab055856d301bbc2b04e.png\"\n", + "# ,SPRITE_DIR / \"Puppy.sprite3\" / \"c4aeb5c39b39ef57a3f18ace54cf7db1.png\"\n", + "# ,SPRITE_DIR / \"Puppy.sprite3\" / \"c7817052ed9e78057f877d0d56b5c6a6.png\"\n", + "# ,SPRITE_DIR / \"Rabbit.sprite3\" / \"137976ec71439e2f986caeaa70e4c932.png\"\n", + "# ,SPRITE_DIR / \"Rabbit.sprite3\" / \"1ca3f829a2c9f7fa4d1df295fe5f787c.png\"\n", + "# ,SPRITE_DIR / \"Rabbit.sprite3\" / \"49169d752f20d27fb71022b16044d759.png\"\n", + "# ,SPRITE_DIR / \"Rabbit.sprite3\" / \"90677c6f16380ef077d6115f6a6371ff.png\"\n", + "# ,SPRITE_DIR / \"Rabbit.sprite3\" / \"970f886bfa454e1daa6d6c30ef49a972.png\"\n", + "# ,SPRITE_DIR / \"Radio.sprite3\" / \"828f0762d028605f6fe52f9287555b74.png\"\n", + "# ,SPRITE_DIR / \"Radio.sprite3\" / \"e96676f038fc523b40392dc1676552dc.png\"\n", + "# ,SPRITE_DIR / \"Rainbow.sprite3\" / \"033979eba12e4572b2520bd93a87583e.png\"\n", + "# ,SPRITE_DIR / \"Referee.sprite3\" / \"1cd641a48499db84636d983916b62a83.png\"\n", + "# ,SPRITE_DIR / \"Referee.sprite3\" / \"46dde2baba61a7e48463ae8e58441470.png\"\n", + "# ,SPRITE_DIR / \"Referee.sprite3\" / \"5948c4160089fcc0975a867221ff2256.png\"\n", + "# ,SPRITE_DIR / \"Referee.sprite3\" / 
\"7eeca5313c2e7d455482badff3079f64.png\"\n", + "# ,SPRITE_DIR / \"Reindeer.sprite3\" / \"60993a025167e7886736109dca5d55e2.png\"\n", + "# ,SPRITE_DIR / \"Retro Robot.sprite3\" / \"35070c1078c4eec153ea2769516c922c.png\"\n", + "# ,SPRITE_DIR / \"Retro Robot.sprite3\" / \"53398a713b144ecda6ec32fb4a8d28e1.png\"\n", + "# ,SPRITE_DIR / \"Retro Robot.sprite3\" / \"d139f89665962dcaab4cb2b246359ba1.png\"\n", + "# ,SPRITE_DIR / \"Ripley.sprite3\" / \"043373c51689f3df8bf50eb12c4e3d39.png\"\n", + "# ,SPRITE_DIR / \"Ripley.sprite3\" / \"3ab169f52ea3783270d28ef035a5a7c5.png\"\n", + "# ,SPRITE_DIR / \"Ripley.sprite3\" / \"8e173178d886d1cb272877e8923d651b.png\"\n", + "# ,SPRITE_DIR / \"Ripley.sprite3\" / \"90feaffe3d0c4d31287d57bd1bc64afa.png\"\n", + "# ,SPRITE_DIR / \"Ripley.sprite3\" / \"e751d0a781694897f75046eb2810e9a5.png\"\n", + "# ,SPRITE_DIR / \"Ripley.sprite3\" / \"f798adaf44e8891c5e2f1b2a82a613b2.png\"\n", + "# ,SPRITE_DIR / \"Robot.sprite3\" / \"10060b3b58c77345cfe92288a46e5c20.png\"\n", + "# ,SPRITE_DIR / \"Robot.sprite3\" / \"36d1098b880dbe47e58d93e7b2842381.png\"\n", + "# ,SPRITE_DIR / \"Robot.sprite3\" / \"4f5441207afc9bc075b0b404dbba8b59.png\"\n", + "# ,SPRITE_DIR / \"Robot.sprite3\" / \"89679608327ad572b93225d06fe9edda.png\"\n", + "# ,SPRITE_DIR / \"Rocketship.sprite3\" / \"10f83786e5ee34f40ee43b49bba89ee2.png\"\n", + "# ,SPRITE_DIR / \"Rocketship.sprite3\" / \"49ee475c516a444d8a512724063b8b98.png\"\n", + "# ,SPRITE_DIR / \"Rocketship.sprite3\" / \"525c06ceb3a351244bcd810c9ba951c7.png\"\n", + "# ,SPRITE_DIR / \"Rocketship.sprite3\" / \"5682c68af2cc8aea791f0373e9ed03d8.png\"\n", + "# ,SPRITE_DIR / \"Rocketship.sprite3\" / \"a6ff2f1344a18cc0a4bcc945e00afaf4.png\"\n", + "# ,SPRITE_DIR / \"Rocks.sprite3\" / \"55426ccbb5c49b1526e53586943f3ec3.png\"\n", + "# ,SPRITE_DIR / \"Rooster.sprite3\" / \"0ae345deb1c81ec7f4f4644c26ac85fa.png\"\n", + "# ,SPRITE_DIR / \"Rooster.sprite3\" / \"6490360bd5d6efd2b646fb24c19df6b1.png\"\n", + "# ,SPRITE_DIR / \"Rooster.sprite3\" / \"bd5f701c99aa6512bac7b87c51e7cd46.png\"\n", + "# ,SPRITE_DIR / \"Ruby.sprite3\" / \"c30210e8f719c3a4d2c7cc6917a39300.png\"\n", + "# ,SPRITE_DIR / \"Ruby.sprite3\" / \"fc15fdbcc535473f6140cab28197f3be.png\"\n", + "# ,SPRITE_DIR / \"Sailboat.sprite3\" / \"ca241a938a2c44a0de6b91230012ff39.png\"\n", + "# ,SPRITE_DIR / \"Sam.sprite3\" / \"8208e99159b36c957fb9fbc187e51bc7.png\"\n", + "# ,SPRITE_DIR / \"Sasha.sprite3\" / \"89bb25e1465eb9481d267e4f9df592af.png\"\n", + "# ,SPRITE_DIR / \"Sasha.sprite3\" / \"a0b8890ce458aebed5e7002e1897508e.png\"\n", + "# ,SPRITE_DIR / \"Sasha.sprite3\" / \"e26bf53469cafd730ca150e745ceeafc.png\"\n", + "# ,SPRITE_DIR / \"Saxophone.sprite3\" / \"4414c51bdd03f60f40a1210e1d55cf57.png\"\n", + "# ,SPRITE_DIR / \"Saxophone.sprite3\" / \"459a64bebb7a788395c70e5369ab4746.png\"\n", + "# ,SPRITE_DIR / \"Scarf.sprite3\" / \"05b06ab8d2c6e2110896d70bb60a9fd7.png\"\n", + "# ,SPRITE_DIR / \"Scarf.sprite3\" / \"213db212d5d0c602f85cb248719ce785.png\"\n", + "# ,SPRITE_DIR / \"Scarf.sprite3\" / \"4a85e4e6232f12abf9802bec4aa419b3.png\"\n", + "# ,SPRITE_DIR / \"Shark 2.sprite3\" / \"6182a0628eadf2d16624864bea964432.png\"\n", + "# ,SPRITE_DIR / \"Shark 2.sprite3\" / \"7f4440b268358417aa79ccef06877c57.png\"\n", + "# ,SPRITE_DIR / \"Shark 2.sprite3\" / \"8a8d551e951087050cfa88fc64f9b4db.png\"\n", + "# ,SPRITE_DIR / \"Shark.sprite3\" / \"6c8008ae677ec51af8da5023fa2cd521.png\"\n", + "# ,SPRITE_DIR / \"Shark.sprite3\" / \"b769db8fcbbf2609f0552db62ec1f94a.png\"\n", + "# ,SPRITE_DIR / \"Shirt.sprite3\" / 
\"43e916bbe0ba7cecd08407d25ac3d104.png\"\n", + "# ,SPRITE_DIR / \"Shoes.sprite3\" / \"1e813a1618f38212a6febaa7e6b8d712.png\"\n", + "# ,SPRITE_DIR / \"Shoes.sprite3\" / \"71b5a444d482455e9956cfd52d20526a.png\"\n", + "# ,SPRITE_DIR / \"Shoes.sprite3\" / \"724d9a8984279949ce452fc9b2e437a6.png\"\n", + "# ,SPRITE_DIR / \"Shoes.sprite3\" / \"f89f1656251248f1591aa67ae946c047.png\"\n", + "# ,SPRITE_DIR / \"Shorts.sprite3\" / \"4d5f7a13ed20dc4f8fd194a7eb3f625f.png\"\n", + "# ,SPRITE_DIR / \"Shorts.sprite3\" / \"d5fc56b7247f079e5821d74d3e91e7a6.png\"\n", + "# ,SPRITE_DIR / \"Shorts.sprite3\" / \"ea78ad682811f9c42731ec648ec7af3c.png\"\n", + "# ,SPRITE_DIR / \"Singer1.sprite3\" / \"d6ff94dc7e24200c28015ee5d6373140.png\"\n", + "# ,SPRITE_DIR / \"Skeleton.sprite3\" / \"3cfff37072a4138b977ba406c290b419.png\"\n", + "# ,SPRITE_DIR / \"Skeleton.sprite3\" / \"67108e6b1d0f41aba2f94f81114ebf59.png\"\n", + "# ,SPRITE_DIR / \"Skeleton.sprite3\" / \"c4d755c672a0826caa7b6fb767cc3f9b.png\"\n", + "# ,SPRITE_DIR / \"Skeleton.sprite3\" / \"f4a00b2bd214b1d8412a2e89b2030354.png\"\n", + "# ,SPRITE_DIR / \"Snake.sprite3\" / \"42519e0ee19d75def88a514d3c49ce37.png\"\n", + "# ,SPRITE_DIR / \"Snake.sprite3\" / \"a0acb49efdf60b20cea0833eeedd44a1.png\"\n", + "# ,SPRITE_DIR / \"Snake.sprite3\" / \"f0e6ebdbdc8571b42f8a48cc2aed3042.png\"\n", + "# ,SPRITE_DIR / \"Snowflake.sprite3\" / \"083735cc9cd0e6d8c3dbab5ab9ee5407.png\"\n", + "# ,SPRITE_DIR / \"Snowman.sprite3\" / \"0f109df620f935b94cb154101e6586d4.png\"\n", + "# ,SPRITE_DIR / \"Soccer Ball.sprite3\" / \"5d973d7a3a8be3f3bd6e1cd0f73c32b5.png\"\n", + "# ,SPRITE_DIR / \"Speaker.sprite3\" / \"697f6becae5321f77990636564ef0c97.png\"\n", + "# ,SPRITE_DIR / \"Squirrel.sprite3\" / \"b86efb7f23387300cf9037a61f328ab9.png\"\n", + "# ,SPRITE_DIR / \"Star.sprite3\" / \"551629f2a64c1f3703e57aaa133effa6.png\"\n", + "# ,SPRITE_DIR / \"Starfish.sprite3\" / \"69dca6e42d45d3fef89f81de40b11bef.png\"\n", + "# ,SPRITE_DIR / \"Starfish.sprite3\" / \"be2ca55a5688670302e7c3f79d5040d1.png\"\n", + "# ,SPRITE_DIR / \"Stop.sprite3\" / \"1e2c3987e4cdb1f317b1773662719b13.png\"\n", + "# ,SPRITE_DIR / \"Story-A.sprite3\" / \"3c46f5192d2c29f957381e0100c6085d.png\"\n", + "# ,SPRITE_DIR / \"Story-A.sprite3\" / \"4b1beecd9a8892df0918242b2b5fbd4c.png\"\n", + "# ,SPRITE_DIR / \"Story-A.sprite3\" / \"7a6fdf5e26fc690879f8e215bfdec4d5.png\"\n", + "# ,SPRITE_DIR / \"Story-B.sprite3\" / \"22817ed2e4253787c78d7b696bbefdc1.png\"\n", + "# ,SPRITE_DIR / \"Story-B.sprite3\" / \"5f8301434ce176ab328f5b658ee1ec05.png\"\n", + "# ,SPRITE_DIR / \"Story-B.sprite3\" / \"a09376e1eacf17be3c9fbd268674b9f7.png\"\n", + "# ,SPRITE_DIR / \"Story-C.sprite3\" / \"5e61610cbba50ba86f18830f61bbaecb.png\"\n", + "# ,SPRITE_DIR / \"Story-C.sprite3\" / \"6bd5cb8bc3e4df5e055f4c56dd630855.png\"\n", + "# ,SPRITE_DIR / \"Story-C.sprite3\" / \"f6ff602902affbae2f89b389f08df432.png\"\n", + "# ,SPRITE_DIR / \"Story-D.sprite3\" / \"130cc4b9ad8dd8936d22c51c05ac6860.png\"\n", + "# ,SPRITE_DIR / \"Story-D.sprite3\" / \"b28d76f648ad24932a18cb40c8d76bc5.png\"\n", + "# ,SPRITE_DIR / \"Story-D.sprite3\" / \"dd713e3bf42d7a4fd8d2f12094db1c63.png\"\n", + "# ,SPRITE_DIR / \"Story-E.sprite3\" / \"3005df22798da45f1daf1de7421bb91d.png\"\n", + "# ,SPRITE_DIR / \"Story-E.sprite3\" / \"4e903ac41a7e16a52efff8477f2398c7.png\"\n", + "# ,SPRITE_DIR / \"Story-E.sprite3\" / \"add5c5a8eec67eb010b5cbd44dea5c8d.png\"\n", + "# ,SPRITE_DIR / \"Story-F.sprite3\" / \"4a3ae31dd3dd3b96239a0307cfdaa1b6.png\"\n", + "# ,SPRITE_DIR / \"Story-F.sprite3\" / 
\"83565581ecc9f7d4010efd8683a99393.png\"\n", + "# ,SPRITE_DIR / \"Story-F.sprite3\" / \"d4ec9a1827429f4e2f3dc239dcc15b95.png\"\n", + "# ,SPRITE_DIR / \"Story-G.sprite3\" / \"648cfdd48a7f748e6198194669ba1909.png\"\n", + "# ,SPRITE_DIR / \"Story-G.sprite3\" / \"85144902cc61fe98dca513b74276d7d8.png\"\n", + "# ,SPRITE_DIR / \"Story-G.sprite3\" / \"8fb61932544adbe8c95b067ad1351758.png\"\n", + "# ,SPRITE_DIR / \"Story-H.sprite3\" / \"70520daa9f82a2347c8a8fa9e7fe1a6e.png\"\n", + "# ,SPRITE_DIR / \"Story-H.sprite3\" / \"99aae97a2b49904db7eeb813fa968582.png\"\n", + "# ,SPRITE_DIR / \"Story-H.sprite3\" / \"eec286b1cfea3f219a5b486931abedd2.png\"\n", + "# ,SPRITE_DIR / \"Story-I.sprite3\" / \"1bceea90292a51a7177abf581f28bf2c.png\"\n", + "# ,SPRITE_DIR / \"Story-I.sprite3\" / \"2c156e20da1ad4e8e397a89ad8fb1c26.png\"\n", + "# ,SPRITE_DIR / \"Story-I.sprite3\" / \"9cad752323aa81dfa8d8cf009057b108.png\"\n", + "# ,SPRITE_DIR / \"Story-J.sprite3\" / \"2838de5d131785c985eb0eab25ec63af.png\"\n", + "# ,SPRITE_DIR / \"Story-J.sprite3\" / \"7d7d6f257a6bf3668a0befa4199f16a0.png\"\n", + "# ,SPRITE_DIR / \"Story-J.sprite3\" / \"d5b58ddd6f6b4fdcfdfd86d102853935.png\"\n", + "# ,SPRITE_DIR / \"Story-K.sprite3\" / \"0cb908dbc38635cc595e6060afc1b682.png\"\n", + "# ,SPRITE_DIR / \"Story-K.sprite3\" / \"17ef8f63a2a8f47258bd62cf642fd8d6.png\"\n", + "# ,SPRITE_DIR / \"Story-K.sprite3\" / \"ecf86afea23fd95e27d4e63659adbfa6.png\"\n", + "# ,SPRITE_DIR / \"Story-L.sprite3\" / \"0fc3ac08468935694255ef8a461d4d26.png\"\n", + "# ,SPRITE_DIR / \"Story-L.sprite3\" / \"935c7cf21c35523c0a232013a6399a49.png\"\n", + "# ,SPRITE_DIR / \"Story-L.sprite3\" / \"ec4d85a60c32c7637de31dbf503266a0.png\"\n", + "# ,SPRITE_DIR / \"Story-M.sprite3\" / \"42e5468fa164e001925d5a49d372f4b1.png\"\n", + "# ,SPRITE_DIR / \"Story-M.sprite3\" / \"643896fcad0a1bf6eb9f3f590094687c.png\"\n", + "# ,SPRITE_DIR / \"Story-M.sprite3\" / \"9bf9e677da34528433d3c1acb945e2df.png\"\n", + "# ,SPRITE_DIR / \"Story-N.sprite3\" / \"40ffad793f4042a5fe7b3aaa6bc175ae.png\"\n", + "# ,SPRITE_DIR / \"Story-N.sprite3\" / \"80c8f32282b697097933837905a6f257.png\"\n", + "# ,SPRITE_DIR / \"Story-N.sprite3\" / \"c2f77473dd16d1a3713218b05390a688.png\"\n", + "# ,SPRITE_DIR / \"Story-O.sprite3\" / \"0bdd31ea2b3b78d0c39022795a49c69a.png\"\n", + "# ,SPRITE_DIR / \"Story-O.sprite3\" / \"40bf3880b678beeda8cf708a51a4402d.png\"\n", + "# ,SPRITE_DIR / \"Story-O.sprite3\" / \"43a89fc1442627ca48b1dc631c517942.png\"\n", + "# ,SPRITE_DIR / \"Story-P.sprite3\" / \"1a41f74cd76d7202d8b22ffc7729e03f.png\"\n", + "# ,SPRITE_DIR / \"Story-P.sprite3\" / \"377eac55366670a03c469705c6689f09.png\"\n", + "# ,SPRITE_DIR / \"Story-P.sprite3\" / \"9cf707e83af27c47e74adb77496ffca5.png\"\n", + "# ,SPRITE_DIR / \"Story-Q.sprite3\" / \"01acd1076994a4379a3fc9e034bc05fc.png\"\n", + "# ,SPRITE_DIR / \"Story-Q.sprite3\" / \"84a6dc992bce018a1eac9be0173ad917.png\"\n", + "# ,SPRITE_DIR / \"Story-Q.sprite3\" / \"efc27a91c30d6a511be4245e36684192.png\"\n", + "# ,SPRITE_DIR / \"Story-R.sprite3\" / \"3c3f44aba3eff8856472e06b333a7201.png\"\n", + "# ,SPRITE_DIR / \"Story-R.sprite3\" / \"4f217b14a161fcd9590614b0733100ea.png\"\n", + "# ,SPRITE_DIR / \"Story-R.sprite3\" / \"5c1d38d02ae9c4df7851a6e9d52f25b4.png\"\n", + "# ,SPRITE_DIR / \"Story-S.sprite3\" / \"47b9f910048ce4db93bdfbcd2638e19a.png\"\n", + "# ,SPRITE_DIR / \"Story-S.sprite3\" / \"5a113fcacd35ababbf23c5a9289433d1.png\"\n", + "# ,SPRITE_DIR / \"Story-S.sprite3\" / \"fd2a94481c3ef0c223784b2f3c6df874.png\"\n", + "# ,SPRITE_DIR / \"Story-T.sprite3\" / 
\"001a2186db228fdd9bfbf3f15800bb63.png\"\n", + "# ,SPRITE_DIR / \"Story-T.sprite3\" / \"66b22b0ff0a5c1c205a701316ab954cf.png\"\n", + "# ,SPRITE_DIR / \"Story-T.sprite3\" / \"b61e1ac30aa2f35d4fd8c23fab1f76ea.png\"\n", + "# ,SPRITE_DIR / \"Story-U.sprite3\" / \"51dd73c840ba3aca0f9770e13cb14fb3.png\"\n", + "# ,SPRITE_DIR / \"Story-U.sprite3\" / \"cfb334b977b8f2a39aa56b1e0532829e.png\"\n", + "# ,SPRITE_DIR / \"Story-U.sprite3\" / \"f6b7b4da5362fdac29d84f1fbf19e3f4.png\"\n", + "# ,SPRITE_DIR / \"Story-V.sprite3\" / \"43a8993221848f90e9f37664e7832b4a.png\"\n", + "# ,SPRITE_DIR / \"Story-V.sprite3\" / \"d5c20886e3eb0ca0f5430c9482b1d832.png\"\n", + "# ,SPRITE_DIR / \"Story-V.sprite3\" / \"f27e7a4216665a6eab43fe9b4b5ec934.png\"\n", + "# ,SPRITE_DIR / \"Story-W.sprite3\" / \"396e27d20d1a49edaa106ba6d667cedd.png\"\n", + "# ,SPRITE_DIR / \"Story-W.sprite3\" / \"528df57da4490f6da8c75da06a1367f5.png\"\n", + "# ,SPRITE_DIR / \"Story-W.sprite3\" / \"f21ba826cd88c376e868f079d6df273c.png\"\n", + "# ,SPRITE_DIR / \"Story-X.sprite3\" / \"04be1176e562eff16f1159f69945a82e.png\"\n", + "# ,SPRITE_DIR / \"Story-X.sprite3\" / \"ca4e3e84788bdeea42dd5ed952d5a66c.png\"\n", + "# ,SPRITE_DIR / \"Story-X.sprite3\" / \"db0c1a6499169aac6639a1a0076658ce.png\"\n", + "# ,SPRITE_DIR / \"Story-Y.sprite3\" / \"093a9410933f7d01f459f08bcb01735b.png\"\n", + "# ,SPRITE_DIR / \"Story-Y.sprite3\" / \"59275f907633ce02074f787e5767bfde.png\"\n", + "# ,SPRITE_DIR / \"Story-Y.sprite3\" / \"d7fabe2652c93dd1bf91d9064cf5a348.png\"\n", + "# ,SPRITE_DIR / \"Story-Z.sprite3\" / \"23c24dbee23b1545afa8ee15ed339327.png\"\n", + "# ,SPRITE_DIR / \"Story-Z.sprite3\" / \"34825a171f7b35962484fa53e99ff632.png\"\n", + "# ,SPRITE_DIR / \"Story-Z.sprite3\" / \"665db4c356d7e010fa8d71cc291834e3.png\"\n", + "# ,SPRITE_DIR / \"Strawberry.sprite3\" / \"10ed1486ff4bab3eebb3b8ae55d81ccd.png\"\n", + "# ,SPRITE_DIR / \"Strawberry.sprite3\" / \"2fa57942dc7ded7eddc4d41554768d67.png\"\n", + "# ,SPRITE_DIR / \"Strawberry.sprite3\" / \"662279c12965d2913a060a55aebec496.png\"\n", + "# ,SPRITE_DIR / \"Strawberry.sprite3\" / \"aa4eae20c750900e4f63e6ede4083d81.png\"\n", + "# ,SPRITE_DIR / \"Strawberry.sprite3\" / \"f5008785e74590689afca4b578d108a4.png\"\n", + "# ,SPRITE_DIR / \"Sun.sprite3\" / \"406808d86aff20a15d592b308e166a32.png\"\n", + "# ,SPRITE_DIR / \"Sunglasses1.sprite3\" / \"c95a05c3bed665027d267d93454c428a.png\"\n", + "# ,SPRITE_DIR / \"Sunglasses1.sprite3\" / \"dc568ae1f8b9b6544f0634ef975a7098.png\"\n", + "# ,SPRITE_DIR / \"Taco.sprite3\" / \"383ea1ef802bc2706670536cfa8271b7.png\"\n", + "# ,SPRITE_DIR / \"Taco.sprite3\" / \"c97113d17afeaac9f461ea0ec257ef26.png\"\n", + "# ,SPRITE_DIR / \"Takeout.sprite3\" / \"24cc271fd6cf55f25b71e78faf749a98.png\"\n", + "# ,SPRITE_DIR / \"Takeout.sprite3\" / \"2b32d6a4a724c38bfaeb494d30827f19.png\"\n", + "# ,SPRITE_DIR / \"Takeout.sprite3\" / \"40f63eb18230c4defa9051830beffb0f.png\"\n", + "# ,SPRITE_DIR / \"Takeout.sprite3\" / \"9202a59888545c56c864bacb700c4297.png\"\n", + "# ,SPRITE_DIR / \"Takeout.sprite3\" / \"e03cd6e668e0eeddb2da98a095e2f30f.png\"\n", + "# ,SPRITE_DIR / \"Tatiana.sprite3\" / \"5cf65a9f942ca92c93915527ff9db1e6.png\"\n", + "# ,SPRITE_DIR / \"Tatiana.sprite3\" / \"91fb7d056beaf553ccec03d61d72c545.png\"\n", + "# ,SPRITE_DIR / \"Tatiana.sprite3\" / \"e207fd3f99e1db8c5d66f49446f27e37.png\"\n", + "# ,SPRITE_DIR / \"Tatiana.sprite3\" / \"e2ea6bbc6066574d4836e808a1c5f849.png\"\n", + "# ,SPRITE_DIR / \"Taylor.sprite3\" / \"a504d785629f2d1ca6b87e80b334d5e8.png\"\n", + "# ,SPRITE_DIR / \"Taylor.sprite3\" / 
\"ae2eaae0882543dc276c8e7d56ff2e7b.png\"\n", + "# ,SPRITE_DIR / \"Taylor.sprite3\" / \"e0082f49fc5d0d83d7fad6124ba82bb1.png\"\n", + "# ,SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"279bd5499329f98a68cf92c68014e198.png\"\n", + "# ,SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"377b8521c436f4f39ed2100fa1cb7c2f.png\"\n", + "# ,SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"3c9a7eac1d696ae74ee40c6efa8fa4dd.png\"\n", + "# ,SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"548bdf23904e409c1fcc0992f44d0b4c.png\"\n", + "# ,SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"580fba92f23d5592200eb5a9079dc38f.png\"\n", + "# ,SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"8313a2229d555bbdb8ce92dffed067ad.png\"\n", + "# ,SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"86602007ae2952236d47d7fd587a56b6.png\"\n", + "# ,SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"b2f75ac1cd84615efaea6a7d7a4ee205.png\"\n", + "# ,SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"ce2141ce97921ddc333bc65ff5bec27d.png\"\n", + "# ,SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"e06ac61e96e3a5abf4ca0863816f5d28.png\"\n", + "# ,SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"e51942bb4651e616549cfce1ad36ff83.png\"\n", + "# ,SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"f60f99278455c843b7833fb7615428dd.png\"\n", + "# ,SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"fea7045c09073700b88fae8d4d257cd1.png\"\n", + "# ,SPRITE_DIR / \"Tennis Ball.sprite3\" / \"34fa36004be0340ec845ba6bbeb5e5d5.png\"\n", + "# ,SPRITE_DIR / \"Tera.sprite3\" / \"18f9a11ecdbd3ad8719beb176c484d41.png\"\n", + "# ,SPRITE_DIR / \"Tera.sprite3\" / \"2daca5f43efc2d29fb089879448142e9.png\"\n", + "# ,SPRITE_DIR / \"Tera.sprite3\" / \"365d4de6c99d71f1370f7c5e636728af.png\"\n", + "# ,SPRITE_DIR / \"Tera.sprite3\" / \"5456a723f3b35eaa946b974a59888793.png\"\n", + "# ,SPRITE_DIR / \"Toucan.sprite3\" / \"72952d831d0b67c9d056b44a4bc3d0ae.png\"\n", + "# ,SPRITE_DIR / \"Toucan.sprite3\" / \"9eef2e49b3bbf371603ae783cd82db3c.png\"\n", + "# ,SPRITE_DIR / \"Toucan.sprite3\" / \"b6345d7386021ee85bb17f8aa4950eed.png\"\n", + "# ,SPRITE_DIR / \"Trampoline.sprite3\" / \"8fa3c6fcff2f25f5fe7842d68dcfe5cf.png\"\n", + "# ,SPRITE_DIR / \"Tree1.sprite3\" / \"d04b15886635101db8220a4361c0c88d.png\"\n", + "# ,SPRITE_DIR / \"Trees.sprite3\" / \"04758bd432a8b1cab527bddf14432147.png\"\n", + "# ,SPRITE_DIR / \"Trees.sprite3\" / \"551b3fae8eab06b49013f54009a7767a.png\"\n", + "# ,SPRITE_DIR / \"Trisha.sprite3\" / \"2d06023ec09ec312ab49055530511134.png\"\n", + "# ,SPRITE_DIR / \"Trisha.sprite3\" / \"55d31103bc86447c6a727b4f0664a5ea.png\"\n", + "# ,SPRITE_DIR / \"Trisha.sprite3\" / \"c31dc8487a841f644889784ff437e2c5.png\"\n", + "# ,SPRITE_DIR / \"Truck.sprite3\" / \"63b00424bdabc3459e5bc554c6c21e06.png\"\n", + "# ,SPRITE_DIR / \"Truck.sprite3\" / \"aaa05abc5aa182a0d7bfdc6db0f3207a.png\"\n", + "# ,SPRITE_DIR / \"Truck.sprite3\" / \"ce077e6db3573062017f94c2e4a8caea.png\"\n", + "# ,SPRITE_DIR / \"Trumpet.sprite3\" / \"47a1ec267505be96b678df30b92ec534.png\"\n", + "# ,SPRITE_DIR / \"Trumpet.sprite3\" / \"9a5c211622d6d2fed600c1809fccd21d.png\"\n", + "# ,SPRITE_DIR / \"Unicorn 2.sprite3\" / \"dcbeac8e856c9ddd6c457376be6573c8.png\"\n", + "# ,SPRITE_DIR / \"Unicorn Running.sprite3\" / \"1fb3d038e985c01899881bc5bb373c16.png\"\n", + "# ,SPRITE_DIR / \"Unicorn Running.sprite3\" / \"4709966d11b37e8a11d24c800e8b2859.png\"\n", + "# ,SPRITE_DIR / \"Unicorn Running.sprite3\" / \"8feaeec435125227c675dd95f69ff835.png\"\n", + "# ,SPRITE_DIR / \"Unicorn Running.sprite3\" / \"e111350b8bedefffee0d5e7e2490d446.png\"\n", + "# ,SPRITE_DIR / \"Unicorn Running.sprite3\" / \"f00efa25fc97f2cce2499771d6a5f809.png\"\n", 
+ "# ,SPRITE_DIR / \"Unicorn Running.sprite3\" / \"fa5fe4596494a43db8c7957d2254aee3.png\"\n", + "# ,SPRITE_DIR / \"Unicorn.sprite3\" / \"1439d51d9878276362b123c9045af6b5.png\"\n", + "# ,SPRITE_DIR / \"Wand.sprite3\" / \"c021f0c7e3086a11336421dd864b7812.png\"\n", + "# ,SPRITE_DIR / \"Wanda.sprite3\" / \"0b008dabac95126132ab4e0c56d25400.png\"\n", + "# ,SPRITE_DIR / \"Watermelon.sprite3\" / \"1ed1c8b78eae2ee7422074d7f883031d.png\"\n", + "# ,SPRITE_DIR / \"Watermelon.sprite3\" / \"21d1340478e32a942914a7afd12b9f1a.png\"\n", + "# ,SPRITE_DIR / \"Watermelon.sprite3\" / \"677738282686d2dcce35d731c3ddc043.png\"\n", + "# ,SPRITE_DIR / \"Winter Hat.sprite3\" / \"2672323e34d6dc82fda8fc3b057fa5aa.png\"\n", + "# ,SPRITE_DIR / \"Witch.sprite3\" / \"44cbaf358d2d8e66815e447c25a4b72e.png\"\n", + "# ,SPRITE_DIR / \"Witch.sprite3\" / \"668c9dc76ba6a07bebabf5aed4623566.png\"\n", + "# ,SPRITE_DIR / \"Witch.sprite3\" / \"a7e48fc790511fbd46b30b1cdcdc98fc.png\"\n", + "# ,SPRITE_DIR / \"Witch.sprite3\" / \"b10fb75f426397e10c878fda19d92009.png\"\n", + "# ,SPRITE_DIR / \"Wizard Girl.sprite3\" / \"4be145d338d921b2d9d6dfd10cda4a6c.png\"\n", + "# ,SPRITE_DIR / \"Wizard Hat.sprite3\" / \"398e447e36465c2521fdb3a6917b0c65.png\"\n", + "# ,SPRITE_DIR / \"Wizard-toad.sprite3\" / \"4041d5a2d1869e81268b9b92b49013a3.png\"\n", + "# ,SPRITE_DIR / \"Wizard-toad.sprite3\" / \"ca3bb4d397ecf6cda3edc48340af908b.png\"\n", + "# ,SPRITE_DIR / \"Wizard.sprite3\" / \"55ba51188af86ca16ef30267e874c1ed.png\"\n", + "# ,SPRITE_DIR / \"Wizard.sprite3\" / \"91d495085eb4d02a375c42f6318071e7.png\"\n", + "# ,SPRITE_DIR / \"Wizard.sprite3\" / \"df943c9894ee4b9df8c5893ce30c2a5f.png\"\n", + "# ,SPRITE_DIR / \"Zebra.sprite3\" / \"0e3bc5073305b7079b5e9a8c7b7d7f9b.png\"\n", + "# ,SPRITE_DIR / \"Zebra.sprite3\" / \"f3e322a25b9f79801066056de6f33fb1.png\"\n", + "# ,BACKDROP_DIR / \"Arctic.sb3\" / \"67e0db3305b3c8bac3a363b1c428892e.png\"\n", + "# # ,BACKDROP_DIR / \"Arctic.sb3\" / \"8eb8790be5507fdccf73e7c1570bbbab.png\"\n", + "# ,BACKDROP_DIR / \"Baseball 1.sb3\" / \"825d9b54682c406215d9d1f98a819449.png\"\n", + "# ,BACKDROP_DIR / \"Baseball 2.sb3\" / \"7be1f5b3e682813dac1f297e52ff7dca.png\"\n", + "# ,BACKDROP_DIR / \"Basketball 1.sb3\" / \"ae21eac3d1814aee1d37ae82ea287816.png\"\n", + "# ,BACKDROP_DIR / \"Basketball 2.sb3\" / \"a5865738283613a2725b2c9dda6d8c78.png\"\n", + "# ,BACKDROP_DIR / \"Beach Malibu.sb3\" / \"050615fe992a00d6af0e664e497ebf53.png\"\n", + "# ,BACKDROP_DIR / \"Beach Rio.sb3\" / \"968f0ede6e70e1dbb763d6fd4c5003e0.png\"\n", + "# ,BACKDROP_DIR / \"Bedroom 1.sb3\" / \"7aa6bbb2ddc4c10f901e1a50aeac1c7e.png\"\n", + "# ,BACKDROP_DIR / \"Bedroom 2.sb3\" / \"e2f8b0dbd0a65d2ad8bfc21616662a6a.png\"\n", + "# ,BACKDROP_DIR / \"Bedroom 3.sb3\" / \"8cc0b88d53345b3e337e8f028a32a4e7.png\"\n", + "# ,BACKDROP_DIR / \"Bench With View.sb3\" / \"962201a2b712a302fb087f8f0dcb2076.png\"\n", + "# ,BACKDROP_DIR / \"Blue Sky 2.sb3\" / \"8eb8790be5507fdccf73e7c1570bbbab.png\"\n", + "# ,BACKDROP_DIR / \"Blue Sky.sb3\" / \"e7c147730f19d284bcd7b3f00af19bb6.png\"\n", + "# ,BACKDROP_DIR / \"Boardwalk.sb3\" / \"de0e54cd11551566f044e7e6bc588b2c.png\"\n", + "# ,BACKDROP_DIR / \"Canyon.sb3\" / \"c7c0b27b959193a0b570a9639cfe8158.png\"\n", + "# ,BACKDROP_DIR / \"Castle 1.sb3\" / \"e1914ed7917267f1c2ef2b48004cade9.png\"\n", + "# ,BACKDROP_DIR / \"Castle 2.sb3\" / \"951765ee7f7370f120c9df20b577c22f.png\"\n", + "# ,BACKDROP_DIR / \"Castle 3.sb3\" / \"76fa99f67569fcd39b4be74ed38c33f3.png\"\n", + "# ,BACKDROP_DIR / \"Castle 4.sb3\" / \"4f45f79af8e8dac3d41eb5a06ade61d4.png\"\n", + "# 
,BACKDROP_DIR / \"Chalkboard.sb3\" / \"a8a24b5aa717bbef09dbe31368914427.png\"\n", + "# ,BACKDROP_DIR / \"Circles.sb3\" / \"c9847be305920807c5597d81576dd0c4.png\"\n", + "# ,BACKDROP_DIR / \"City With Water.sb3\" / \"1ef98019fc94ea65a1b55d5521285c7a.png\"\n", + "# ,BACKDROP_DIR / \"Colorful City.sb3\" / \"04d18ddd1b85f0ea30beb14b8da49f60.png\"\n", + "# ,BACKDROP_DIR / \"Concert.sb3\" / \"c8d90320d2966c08af8cdd1c6a7a93b5.png\"\n", + "# ,BACKDROP_DIR / \"Desert.sb3\" / \"d98a9526a34890cf4bad11b5409eae2a.png\"\n", + "# ,BACKDROP_DIR / \"Farm.sb3\" / \"1e8a70bd07f1dcba3383883f3b948266.png\"\n", + "# ,BACKDROP_DIR / \"Field At Mit.sb3\" / \"5b0a970202b464915915260c03f05455.png\"\n", + "# ,BACKDROP_DIR / \"Flowers.sb3\" / \"25a6ede51a96d4e55de2ffb81ae96f8c.png\"\n", + "# ,BACKDROP_DIR / \"Forest.sb3\" / \"92968ac16b2f0c3f7835a6dacd172c7b.png\"\n", + "# ,BACKDROP_DIR / \"Galaxy.sb3\" / \"5fab1922f254ae9fd150162c3e392bef.png\"\n", + "# ,BACKDROP_DIR / \"Garden-rock.sb3\" / \"4f66053598bea0905e1559ab9d5a6e31.png\"\n", + "# ,BACKDROP_DIR / \"Greek Theater.sb3\" / \"93d71e8b8a96cc007b8d68f36acd338a.png\"\n", + "# ,BACKDROP_DIR / \"Hall.sb3\" / \"ea86ca30b346f27ca5faf1254f6a31e3.png\"\n", + "# ,BACKDROP_DIR / \"Hay Field.sb3\" / \"da102a69d135973e0fc139131dec785a.png\"\n", + "# ,BACKDROP_DIR / \"Hearts.sb3\" / \"f98526ccb0eec3ac7d6c8f8ab502825e.png\"\n", + "# ,BACKDROP_DIR / \"Hill.sb3\" / \"2129c842f28d6881f622fdc3497ff2da.png\"\n", + "# ,BACKDROP_DIR / \"Jungle.sb3\" / \"f4f908da19e2753f3ed679d7b37650ca.png\"\n", + "# ,BACKDROP_DIR / \"Jurassic.sb3\" / \"64025bdca5db4938f65597e3682fddcf.png\"\n", + "# ,BACKDROP_DIR / \"Light.sb3\" / \"4b98c07876ed8997c3762e75790507b4.png\"\n", + "# ,BACKDROP_DIR / \"Metro.sb3\" / \"0b4a15ba028bf205ec051390d6ac4de7.png\"\n", + "# ,BACKDROP_DIR / \"Moon.sb3\" / \"0b1d2eaf22d62ef88de80ccde5578fba.png\"\n", + "# ,BACKDROP_DIR / \"Mountain.sb3\" / \"f84989feee2cf462a1c597169777ee3c.png\"\n", + "# ,BACKDROP_DIR / \"Mural.sb3\" / \"efb625f7e0b199b15f69e116cd053cea.png\"\n", + "# ,BACKDROP_DIR / \"Nebula.sb3\" / \"9b5cdbd596da1b6149f56b794b6394f4.png\"\n", + "# ,BACKDROP_DIR / \"Neon Tunnel.sb3\" / \"57d2b13b2f73d3d878c72810c137b0d6.png\"\n", + "# ,BACKDROP_DIR / \"Night City With Street.sb3\" / \"14443ad7907b6479d7562a12b8ae0efb.png\"\n", + "# ,BACKDROP_DIR / \"Night City.sb3\" / \"6fdc795ff487204f72740567be5f64f9.png\"\n", + "# ,BACKDROP_DIR / \"Party.sb3\" / \"108160d0e44d1c340182e31c9dc0758a.png\"\n", + "# ,BACKDROP_DIR / \"Pathway.sb3\" / \"5d747ec036755a4b129f0d5b978bc61c.png\"\n", + "# ,BACKDROP_DIR / \"Playground.sb3\" / \"e5f794c8756ca0cead5cb7e7fe354c41.png\"\n", + "# ,BACKDROP_DIR / \"Playing Field.sb3\" / \"2de108f3098e92f5c5976cf75d38e99d.png\"\n", + "# ,BACKDROP_DIR / \"Pool.sb3\" / \"6cab934df643d2fc508cfa90c0c4059b.png\"\n", + "# ,BACKDROP_DIR / \"Rays.sb3\" / \"87e963282db9e020e8c4d075891ea12b.png\"\n", + "# ,BACKDROP_DIR / \"Refrigerator.sb3\" / \"98f053f9681e872f34fafd783ce72205.png\"\n", + "# ,BACKDROP_DIR / \"Room 1.sb3\" / \"87ec29ad216c0074c731d581c7f40c39.png\"\n", + "# ,BACKDROP_DIR / \"Room 1.sb3\" / \"a81668321aa3dcc0fc185d3e36ae76f6.png\"\n", + "# ,BACKDROP_DIR / \"Room 2.sb3\" / \"05ae3e3bbea890a6e3552ffe8456775e.png\"\n", + "# ,BACKDROP_DIR / \"Savanna.sb3\" / \"9b020b8c7cb6a9592f7303add9441d8f.png\"\n", + "# ,BACKDROP_DIR / \"School.sb3\" / \"1dea69ac0f62cf538d368a7bde1372ac.png\"\n", + "# ,BACKDROP_DIR / \"Slopes.sb3\" / \"63b6a69594a0a87888b56244bfa2ac1b.png\"\n", + "# ,BACKDROP_DIR / \"Soccer 2.sb3\" / 
\"b0dc1268cb595aaeef405bce40d1639c.png\"\n", + "# ,BACKDROP_DIR / \"Soccer.sb3\" / \"04a63154f04b09494354090f7cc2f1b9.png\"\n", + "# ,BACKDROP_DIR / \"Space City 1.sb3\" / \"20344b0edcc498281e4cb80242a72667.png\"\n", + "# ,BACKDROP_DIR / \"Space City 2.sb3\" / \"32b2316fd375faa18088f6c57ebb1c8d.png\"\n", + "# ,BACKDROP_DIR / \"Space.sb3\" / \"84208d9a3718ec3c9fc5a32a792fa1d0.png\"\n", + "# ,BACKDROP_DIR / \"Spaceship.sb3\" / \"0c450891306fa63ef02aa0fda7fd0ef9.png\"\n", + "# ,BACKDROP_DIR / \"Spotlight.sb3\" / \"d26bf4c3980163d9106625cc2ea6c50d.png\"\n", + "# ,BACKDROP_DIR / \"Stars.sb3\" / \"47282ff0f7047c6fab9c94b531abf721.png\"\n", + "# ,BACKDROP_DIR / \"Stripes.sb3\" / \"a6a21f5c08d586e8daaebde37c97fb6f.png\"\n", + "# ,BACKDROP_DIR / \"Theater 2.sb3\" / \"061a78ed83495dd0acd6d62e83e1b972.png\"\n", + "# ,BACKDROP_DIR / \"Theater.sb3\" / \"c2b097bc5cdb6a14ef5485202bc5ee76.png\"\n", + "# ,BACKDROP_DIR / \"Tree.sb3\" / \"a23fbf972001c94637b568992f8fd7bd.png\"\n", + "# ,BACKDROP_DIR / \"Underwater 1.sb3\" / \"d3344650f594bcecdf46aa4a9441badd.png\"\n", + "# ,BACKDROP_DIR / \"Underwater 2.sb3\" / \"1517c21786d2d0edc2f3037408d850bd.png\"\n", + "# ,BACKDROP_DIR / \"Urban.sb3\" / \"1679049718869e1f548e1e8823e29c1c.png\"\n", + "# ,BACKDROP_DIR / \"Wall 1.sb3\" / \"7e5327c68ff6ddabc48dbfe4717a04fe.png\"\n", + "# ,BACKDROP_DIR / \"Wall 2.sb3\" / \"82d867fcd9f1b5f49e29c2f853d55665.png\"\n", + "# ,BACKDROP_DIR / \"Water And Rocks.sb3\" / \"0015433a406a53f00b792424b823268c.png\"\n", + "# ,BACKDROP_DIR / \"Wetland.sb3\" / \"ef9973bcff6d4cbc558e946028ec7d23.png\"\n", + "# ,BACKDROP_DIR / \"Winter.sb3\" / \"5fa9385a60b904672d0e46e9d768bb32.png\"\n", + "# ,BACKDROP_DIR / \"Witch House.sb3\" / \"30085b2d27beb5acdbe895d8b3e64b04.png\"\n", + "# ,BACKDROP_DIR / \"Woods And Bench.sb3\" / \"4fcf7ed0de6c6b6e9b52c511b0650e9c.png\"\n", + "# ,BACKDROP_DIR / \"Woods.sb3\" / \"f3eb165d6f3fd23370f97079f2e631bf.png\"\n", + "# ,BACKDROP_DIR / \"Xy-grid-20px.sb3\" / \"4eec0e1db92b8dea3e5bee25105e8f46.png\"\n", + "# ,BACKDROP_DIR / \"Xy-grid-30px.sb3\" / \"3b8bcabd0ac683b7cb3673208039764b.png\"\n", + "# ,BACKDROP_DIR / \"Xy-grid.sb3\" / \"9838d02002d05f88dc54d96494fbc202.png\"]\n", + "# out_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\embed2.json\"\n", + "# regenerate_reference_embeddings(folder_image_paths, out_path)" + ] + }, + { + "cell_type": "markdown", + "id": "a8bf7ec9", + "metadata": {}, + "source": [ + "# Continue" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "38be5463", + "metadata": {}, + "outputs": [], + "source": [ + "import cv2, json,base64,io,os,tempfile,logging, re\n", + "import numpy as np\n", + "from unstructured.partition.pdf import partition_pdf\n", + "from PIL import Image\n", + "# from imutils.perspective import four_point_transform\n", + "from dotenv import load_dotenv\n", + "import pytesseract\n", + "from werkzeug.utils import secure_filename\n", + "from langchain_groq import ChatGroq\n", + "from langgraph.prebuilt import create_react_agent\n", + "from pdf2image import convert_from_path, convert_from_bytes\n", + "from concurrent.futures import ThreadPoolExecutor\n", + "from pdf2image.exceptions import PDFInfoNotInstalledError\n", + "from typing import Dict, TypedDict, Optional, Any\n", + "from langgraph.graph import StateGraph, END\n", + "import uuid\n", + "import shutil, time, functools\n", + "from langchain_experimental.open_clip.open_clip import OpenCLIPEmbeddings\n", + "from langchain_core.utils.utils import secret_from_env\n", + "# from 
matplotlib.offsetbox import OffsetImage, AnnotationBbox\n", + "from io import BytesIO\n", + "from pathlib import Path\n", + "import os\n", + "#from utils.block_relation_builder import block_builder, separate_scripts, transform_logic_to_action_flow, analyze_opcode_counts\n", + "from langchain.chat_models import ChatOpenAI\n", + "from langchain_openai import ChatOpenAI\n", + "from pydantic import Field, SecretStr\n", + "from difflib import get_close_matches\n", + "pytesseract.pytesseract.tesseract_cmd = (r'/usr/bin/tesseract')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8fd02b48", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "10141174", + "metadata": {}, + "outputs": [], + "source": [ + "# manipulated_json = {}\n", + "# img_elements = []\n", + "# def extract_images_from_pdf(pdf_stream: io.BytesIO):\n", + "# ''' Extract images from PDF and generate structured sprite JSON '''\n", + "# try:\n", + "# if isinstance(pdf_stream, io.BytesIO):\n", + "# # use a random ID since there's no filename\n", + "# pdf_id = uuid.uuid4().hex \n", + "# else:\n", + "# pdf_id = os.path.splitext(os.path.basename(pdf_stream))[0]\n", + "\n", + "# try:\n", + "# elements = partition_pdf(\n", + "# # filename=str(pdf_path), # partition_pdf might expect a string\n", + "# file=pdf_stream, # 'file=', inplace of 'filename'\n", + "# strategy=\"hi_res\",\n", + "# extract_image_block_types=[\"Image\"],\n", + "# hi_res_model_name=\"yolox\",\n", + "# extract_image_block_to_payload=True,\n", + "# )\n", + "# print(f\"ELEMENTS\")\n", + "# except Exception as e:\n", + "# raise RuntimeError(\n", + "# f\"❌ Failed to extract images from PDF: {str(e)}\")\n", + "\n", + "# file_elements = [element.to_dict() for element in elements]\n", + "# sprite_count = 1\n", + "# for el in file_elements:\n", + "# img_b64 = el[\"metadata\"].get(\"image_base64\")\n", + "# if not img_b64:\n", + "# continue \n", + " \n", + " # manipulated_json[f\"Sprite {sprite_count}\"] = {\n", + " # # \"id\":auto_id,\n", + " # # \"name\": name,\n", + " # \"base64\": el[\"metadata\"][\"image_base64\"],\n", + " # \"file-path\": pdf_id,\n", + " # # \"description\": description\n", + " # }\n", + "# sprite_count += 1\n", + "# return manipulated_json \n", + "# except Exception as e:\n", + "# raise RuntimeError(f\"❌ Error in extract_images_from_pdf: {str(e)}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "9d07d2a3", + "metadata": {}, + "outputs": [], + "source": [ + "def extract_images_from_pdf(pdf_stream, output_dir):\n", + " manipulated_json = {}\n", + " try:\n", + " pdf_id = uuid.uuid4().hex\n", + " elements = partition_pdf(\n", + " file=pdf_stream,\n", + " strategy=\"hi_res\",\n", + " extract_image_block_types=[\"Image\"],\n", + " hi_res_model_name=\"yolox\",\n", + " extract_image_block_to_payload=False,\n", + " extract_image_block_output_dir=output_dir,\n", + " )\n", + " file_elements = [element.to_dict() for element in elements]\n", + " sprite_count = 1\n", + " for el in file_elements:\n", + " img_path = el[\"metadata\"].get(\"image_path\")\n", + " if not img_path:\n", + " continue\n", + " image_uuid = str(uuid.uuid4())\n", + " manipulated_json[f\"Sprite {sprite_count}\"] = {\n", + " \"uuid\": image_uuid,\n", + " \"file-path\": img_path,\n", + " \"pdf-id\": pdf_id,\n", + " }\n", + " sprite_count += 1\n", + " return manipulated_json\n", + " except Exception as e:\n", + " raise RuntimeError(f\"❌ Error in extract_images_from_pdf: {str(e)}\")" + ] + }, 
+ { + "cell_type": "code", + "execution_count": null, + "id": "eb3f6b44", + "metadata": {}, + "outputs": [], + "source": [ + "def similarity_matching(sprites_data: str, project_folder: str) -> str:\n", + " logger.info(\"🔍 Running similarity matching…\")\n", + " os.makedirs(project_folder, exist_ok=True)\n", + "\n", + " # ----------------------------------------\n", + " # CHANGED: define normalized base-paths so startswith() checks work\n", + " backdrop_base_path = os.path.normpath(str(BACKDROP_DIR))\n", + " sprite_base_path = os.path.normpath(str(SPRITE_DIR))\n", + " code_blocks_path = os.path.normpath(str(CODE_BLOCKS_DIR))\n", + " # ----------------------------------------\n", + "\n", + " project_json_path = os.path.join(project_folder, \"project.json\")\n", + "\n", + " # ==============================\n", + " # READ SPRITE METADATA\n", + " # ==============================\n", + "\n", + " sprite_ids, sprite_base64 = [], []\n", + " for sid, sprite in sprites_data.items():\n", + " sprite_ids.append(sid)\n", + " # texts.append(\"This is \" + sprite.get(\"description\", sprite.get(\"name\", \")))\n", + " sprite_base64.append(sprite[\"base64\"])\n", + "\n", + " sprite_images_bytes = []\n", + " for b64 in sprite_base64:\n", + " img = Image.open(BytesIO(base64.b64decode(b64.split(\",\")[-1]))).convert(\"RGB\")\n", + " buffer = BytesIO()\n", + " img.save(buffer, format=\"PNG\")\n", + " buffer.seek(0)\n", + " sprite_images_bytes.append(buffer)\n", + " \n", + " # =========================================\n", + " # Build the list of all candidate images\n", + " # =========================================\n", + " folder_image_paths = [\n", + " # backdrops\n", + " BACKDROP_DIR / \"badroom3.sb3\" / \"\"8cc0b88d53345b3e337e8f028a32a4e7.png\",\n", + " BACKDROP_DIR / \"baseball2.sb3\" / \"7be1f5b3e682813dac1f297e52ff7dca.png\",\n", + " BACKDROP_DIR / \"beach_malibu.sb3\" / \"050615fe992a00d6af0e664e497ebf53.png\",\n", + " BACKDROP_DIR / \"castle2.sb3\" / \"\"951765ee7f7370f120c9df20b577c22f.png\",\n", + " BACKDROP_DIR / \"hall.sb3\" / \"\"ea86ca30b346f27ca5faf1254f6a31e3.png\",\n", + " BACKDROP_DIR / \"jungle.sb3\" / \"\"f4f908da19e2753f3ed679d7b37650ca.png\",\n", + " # sprites\n", + " SPRITE_DIR / \"\"Batter.sprite3\" / \"baseball_sprite_motion_1.png\",\n", + " SPRITE_DIR / \"\"Bear.sprite3\" / \"\"bear_motion_2.png\",\n", + " SPRITE_DIR / \"\"Beetle.sprite3\" / \"46d0dfd4ae7e9bfe3a6a2e35a4905eae.png\",\n", + " SPRITE_DIR / \"\"cat\" / \"\"cat_motion_1.png\",\n", + " SPRITE_DIR / \"\"Centaur.sprite3\" / \"2373556e776cad3ba4d6ee04fc34550b.png\",\n", + " SPRITE_DIR / \"\"Crab.sprite3\" / \"\"bear_element.png\",\n", + " SPRITE_DIR / \"\"Soccer Ball.sprite3\" / \"cat_football.png\",\n", + "\n", + " CODE_BLOCKS_DIR / \"script1.jpg\",\n", + " CODE_BLOCKS_DIR / \"script2.jpg\",\n", + " CODE_BLOCKS_DIR / \"script3.jpg\",\n", + " CODE_BLOCKS_DIR / \"script4.jpg\",\n", + " CODE_BLOCKS_DIR / \"script5.jpg\",\n", + " CODE_BLOCKS_DIR / \"script6.jpg\"\n", + " ]\n", + " folder_image_paths = [os.path.normpath(str(p)) for p in folder_image_paths]\n", + " # =========================================\n", + "\n", + " # -----------------------------------------\n", + " # Load reference embeddings from JSON\n", + " # -----------------------------------------\n", + " with open(f\"{BLOCKS_DIR}/embed.json\", \"r\") as f:\n", + " embedding_json = json.load(f)\n", + "\n", + " # ============================== #\n", + " # EMBED SPRITE IMAGES #\n", + " # ============================== #\n", + " # ensure model is initialized (fast 
no-op after first call)\n", + " init_dinov2()\n", + "\n", + " # embed the incoming sprite BytesIO images (same data structure you already use)\n", + " sprite_matrix = embed_bytesio_list(sprite_images_bytes, batch_size=8) # shape (N, D)\n", + "\n", + " # load reference embeddings from JSON (they must be numeric lists)\n", + " img_matrix = np.array([img[\"embeddings\"] for img in embedding_json], dtype=np.float32)\n", + "\n", + " # normalize both sides (important — stored embeddings may not be normalized)\n", + " sprite_matrix = l2_normalize_rows(sprite_matrix)\n", + " img_matrix = l2_normalize_rows(img_matrix)\n", + " \n", + " # =========================================\n", + " # Compute similarities & pick best match\n", + " # =========================================\n", + " similarity = np.matmul(sprite_matrix, img_matrix.T)\n", + " most_similar_indices = np.argmax(similarity, axis=1)\n", + "\n", + " # =========================================\n", + " # Copy matched sprite assets + collect data\n", + " # =========================================\n", + " project_data = []\n", + " copied_folders = set()\n", + "\n", + " for sprite_idx, matched_idx in enumerate(most_similar_indices):\n", + " matched_image_path = folder_image_paths[matched_idx]\n", + " matched_folder = os.path.dirname(matched_image_path)\n", + "\n", + " # CHANGED: use our new normalized sprite_base_path\n", + " if not matched_folder.startswith(sprite_base_path):\n", + " continue\n", + "\n", + " if matched_folder in copied_folders:\n", + " continue\n", + " copied_folders.add(matched_folder)\n", + " logger.info(f\"Matched sprite: {matched_image_path}\")\n", + "\n", + " sprite_json_path = os.path.join(matched_folder, 'sprite.json')\n", + " if not os.path.exists(sprite_json_path):\n", + " logger.warning(f\"No sprite.json in {matched_folder}\")\n", + " continue\n", + "\n", + " with open(sprite_json_path, 'r') as f:\n", + " sprite_info = json.load(f)\n", + " # copy all non‐matched files\n", + " for fname in os.listdir(matched_folder):\n", + " if fname in (os.path.basename(matched_image_path), 'sprite.json'):\n", + " continue\n", + " shutil.copy2(os.path.join(matched_folder, fname),\n", + " os.path.join(project_folder, fname))\n", + " project_data.append(sprite_info)\n", + "\n", + " # =========================================\n", + " # Copy matched backdrop assets + collect\n", + " # =========================================\n", + " backdrop_data = []\n", + " copied_backdrop_folders = set()\n", + " for backdrop_idx, matched_idx in enumerate(most_similar_indices):\n", + " matched_image_path = folder_image_paths[matched_idx]\n", + " matched_folder = os.path.dirname(matched_image_path)\n", + " matched_filename = os.path.basename(matched_image_path)\n", + " \n", + " # CHANGED: use our new normalized backdrop_base_path\n", + " if not matched_folder.startswith(backdrop_base_path):\n", + " continue\n", + "\n", + " # skip if backdrop folder already processed\n", + " if matched_folder in copied_backdrop_folders:\n", + " continue\n", + " copied_backdrop_folders.add(matched_folder)\n", + "\n", + " logger.info(f\"Matched backdrop: {matched_image_path}\")\n", + "\n", + " # 1) Copy the matched backdrop image itself\n", + " try:\n", + " shutil.copy2(\n", + " matched_image_path,\n", + " os.path.join(project_folder, matched_filename)\n", + " )\n", + " logger.info(f\"✅ Copied matched backdrop image {matched_filename} to {project_folder}\")\n", + " except Exception as e:\n", + " logger.error(f\"❌ Failed to copy matched backdrop {matched_image_path}: 
{e}\")\n", + " \n", + " # copy non‐matched files\n", + " for fname in os.listdir(matched_folder):\n", + " # if fname in (os.path.basename(matched_image_path), 'project.json'):\n", + " if fname in {matched_filename, 'project.json'}:\n", + " continue\n", + " # shutil.copy2(os.path.join(matched_folder, fname),\n", + " # os.path.join(project_folder, fname))\n", + " src = os.path.join(matched_folder, fname)\n", + " dst = os.path.join(project_folder, fname)\n", + " if os.path.isfile(src):\n", + " try:\n", + " shutil.copy2(src, dst)\n", + " logger.info(f\"Copied additional backdrop asset {fname} to project folder\")\n", + " except Exception as e:\n", + " logger.error(f\"Failed to copy {src}: {e}\")\n", + "\n", + " # append the stage‐target from its project.json\n", + " pj = os.path.join(matched_folder, 'project.json')\n", + " if os.path.exists(pj):\n", + " with open(pj, 'r') as f:\n", + " bd_json = json.load(f)\n", + " for tgt in bd_json.get(\"targets\", []):\n", + " if tgt.get(\"isStage\"):\n", + " backdrop_data.append(tgt)\n", + " else:\n", + " logger.warning(f\"No project.json in {matched_folder}\")\n", + "\n", + "\n", + " # =========================================\n", + " # Merge into final Scratch project.json\n", + " # =========================================\n", + " final_project = {\n", + " \"targets\": [], \"monitors\": [], \"extensions\": [],\n", + " \"meta\": {\n", + " \"semver\": \"3.0.0\",\n", + " \"vm\": \"11.3.0\",\n", + " \"agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36\"\n", + " }\n", + " }\n", + " # sprites first\n", + " for spr in project_data:\n", + " if not spr.get(\"isStage\", False):\n", + " final_project[\"targets\"].append(spr)\n", + "\n", + " # then backdrop as the Stage\n", + " if backdrop_data:\n", + " all_costumes, sounds = [], []\n", + " seen_costumes = set()\n", + " for i, bd in enumerate(backdrop_data):\n", + " for costume in bd.get(\"costumes\", []):\n", + " # Create a unique key for the costume\n", + " key = (costume.get(\"name\"), costume.get(\"assetId\"))\n", + " if key not in seen_costumes:\n", + " seen_costumes.add(key)\n", + " all_costumes.append(costume)\n", + " \n", + " if i == 0:\n", + " sounds = bd.get(\"sounds\", [])\n", + " stage_obj={\n", + " \"isStage\": True,\n", + " \"name\": \"Stage\",\n", + " \"objName\": \"Stage\",\n", + " \"variables\": {}, \n", + " \"lists\": {}, \n", + " \"broadcasts\": {},\n", + " \"blocks\": {}, \n", + " \"comments\": {},\n", + " \"currentCostume\": 1 if len(all_costumes) > 1 else 0,\n", + " \"costumes\": all_costumes,\n", + " \"sounds\": sounds,\n", + " \"volume\": 100, \n", + " \"layerOrder\": 0,\n", + " \"tempo\": 60, \n", + " \"videoTransparency\": 50,\n", + " \"videoState\": \"on\",\n", + " \"textToSpeechLanguage\": None\n", + " }\n", + " final_project[\"targets\"].insert(0, stage_obj)\n", + " else:\n", + " logger.warning(\"⚠️ No backdrop matched. 
Using default static backdrop.\")\n", + "        default_backdrop_path = BACKDROP_DIR / \"cd21514d0531fdffb22204e0ec5ed84a.svg\"\n", + "        default_backdrop_name = \"cd21514d0531fdffb22204e0ec5ed84a.svg\"\n", + "\n", + "        default_backdrop_sound = BACKDROP_DIR / \"83a9787d4cb6f3b7632b4ddfebf74367.wav\"\n", + "        default_backdrop_sound_name = \"83a9787d4cb6f3b7632b4ddfebf74367.wav\"\n", + "        try:\n", + "            shutil.copy2(default_backdrop_path, os.path.join(project_folder, default_backdrop_name))\n", + "            logger.info(f\"✅ Default backdrop copied to project: {default_backdrop_name}\")\n", + "\n", + "            shutil.copy2(default_backdrop_sound, os.path.join(project_folder, default_backdrop_sound_name))\n", + "            logger.info(f\"✅ Default backdrop sound copied to project: {default_backdrop_sound_name}\")\n", + "        except Exception as e:\n", + "            logger.error(f\"❌ Failed to copy default backdrop: {e}\")\n", + "        \n", + "        stage_obj={\n", + "            \"isStage\": True,\n", + "            \"name\": \"Stage\",\n", + "            \"objName\": \"Stage\",\n", + "            \"variables\": {},\n", + "            \"lists\": {},\n", + "            \"broadcasts\": {},\n", + "            \"blocks\": {},\n", + "            \"comments\": {},\n", + "            \"currentCostume\": 0,\n", + "            \"costumes\": [\n", + "                {\n", + "                    \"assetId\": default_backdrop_name.split(\".\")[0],\n", + "                    \"name\": \"defaultBackdrop\",\n", + "                    \"md5ext\": default_backdrop_name,\n", + "                    \"dataFormat\": \"svg\",\n", + "                    \"rotationCenterX\": 240,\n", + "                    \"rotationCenterY\": 180\n", + "                }\n", + "            ],\n", + "            \"sounds\": [\n", + "                {\n", + "                    \"name\": \"pop\",\n", + "                    \"assetId\": \"83a9787d4cb6f3b7632b4ddfebf74367\",\n", + "                    \"dataFormat\": \"wav\",\n", + "                    \"format\": \"\",\n", + "                    \"rate\": 48000,\n", + "                    \"sampleCount\": 1123,\n", + "                    \"md5ext\": \"83a9787d4cb6f3b7632b4ddfebf74367.wav\"\n", + "                }\n", + "            ],\n", + "            \"volume\": 100,\n", + "            \"layerOrder\": 0,\n", + "            \"tempo\": 60,\n", + "            \"videoTransparency\": 50,\n", + "            \"videoState\": \"on\",\n", + "            \"textToSpeechLanguage\": None\n", + "        }\n", + "        final_project[\"targets\"].insert(0, stage_obj)\n", + "\n", + "    with open(project_json_path, 'w') as f:\n", + "        json.dump(final_project, f, indent=2)\n", + "    \n", + "    return project_json_path" + ] + },
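 + { + "cell_type": "markdown", + "id": "b2f1c9aa", + "metadata": {}, + "source": [ + "### Sanity-checking the merged project.json (illustrative)\n", + "\n", + "A minimal, illustrative check, not part of the original flow, assuming `project_json_path` and `project_folder` come from the `similarity_matching` call above: reload the merged JSON, confirm a Stage target is present, and verify that every referenced `md5ext` asset was actually copied into the project folder." + ] + },
 + { + "cell_type": "code", + "execution_count": null, + "id": "c4d8e2f1", + "metadata": {}, + "outputs": [], + "source": [ + "# Illustrative sanity check (assumes project_json_path and project_folder from similarity_matching above)\n", + "import json, os\n", + "\n", + "with open(project_json_path, 'r') as f:\n", + "    merged = json.load(f)\n", + "\n", + "targets = merged.get(\"targets\", [])\n", + "print(f\"{len(targets)} targets; stage present: {any(t.get('isStage') for t in targets)}\")\n", + "\n", + "# every costume/sound asset referenced by the merged project should exist in the project folder\n", + "missing = [\n", + "    asset.get(\"md5ext\")\n", + "    for t in targets\n", + "    for asset in t.get(\"costumes\", []) + t.get(\"sounds\", [])\n", + "    if asset.get(\"md5ext\") and not os.path.exists(os.path.join(project_folder, asset.get(\"md5ext\")))\n", + "]\n", + "print(\"missing assets:\", missing or \"none\")" + ] + },
 + { + "cell_type": "code", + "execution_count": null, + "id": "ae07fa30", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1a8f2916", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "unstructured_inference is not installed. Cannot use the hi_res partitioning strategy. Falling back to partitioning with another strategy.\n", + "Falling back to partitioning with ocr_only.\n" + ] + }, + { + "ename": "RuntimeError", + "evalue": "❌ Error in extract_images_from_pdf: ❌ Failed to extract images from PDF: /usr/bin/tesseract is not installed or it's not in your PATH. 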
See README file for more information.", + "output_type": "error", + "traceback": [ + "\u001b[31m---------------------------------------------------------------------------\u001b[39m", + "\u001b[31mFileNotFoundError\u001b[39m Traceback (most recent call last)", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\pytesseract\\pytesseract.py:275\u001b[39m, in \u001b[36mrun_tesseract\u001b[39m\u001b[34m(input_filename, output_filename_base, extension, lang, config, nice, timeout)\u001b[39m\n\u001b[32m 274\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m--> \u001b[39m\u001b[32m275\u001b[39m proc = \u001b[43msubprocess\u001b[49m\u001b[43m.\u001b[49m\u001b[43mPopen\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcmd_args\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43msubprocess_args\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 276\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mOSError\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\subprocess.py:1026\u001b[39m, in \u001b[36mPopen.__init__\u001b[39m\u001b[34m(self, args, bufsize, executable, stdin, stdout, stderr, preexec_fn, close_fds, shell, cwd, env, universal_newlines, startupinfo, creationflags, restore_signals, start_new_session, pass_fds, user, group, extra_groups, encoding, errors, text, umask, pipesize, process_group)\u001b[39m\n\u001b[32m 1023\u001b[39m \u001b[38;5;28mself\u001b[39m.stderr = io.TextIOWrapper(\u001b[38;5;28mself\u001b[39m.stderr,\n\u001b[32m 1024\u001b[39m encoding=encoding, errors=errors)\n\u001b[32m-> \u001b[39m\u001b[32m1026\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43m_execute_child\u001b[49m\u001b[43m(\u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mexecutable\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpreexec_fn\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mclose_fds\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1027\u001b[39m \u001b[43m \u001b[49m\u001b[43mpass_fds\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcwd\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43menv\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1028\u001b[39m \u001b[43m \u001b[49m\u001b[43mstartupinfo\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcreationflags\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mshell\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1029\u001b[39m \u001b[43m \u001b[49m\u001b[43mp2cread\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mp2cwrite\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1030\u001b[39m \u001b[43m \u001b[49m\u001b[43mc2pread\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mc2pwrite\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1031\u001b[39m \u001b[43m \u001b[49m\u001b[43merrread\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43merrwrite\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1032\u001b[39m \u001b[43m \u001b[49m\u001b[43mrestore_signals\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1033\u001b[39m \u001b[43m \u001b[49m\u001b[43mgid\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mgids\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43muid\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mumask\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 
1034\u001b[39m \u001b[43m \u001b[49m\u001b[43mstart_new_session\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mprocess_group\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1035\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m:\n\u001b[32m 1036\u001b[39m \u001b[38;5;66;03m# Cleanup if the child failed starting.\u001b[39;00m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\subprocess.py:1538\u001b[39m, in \u001b[36mPopen._execute_child\u001b[39m\u001b[34m(self, args, executable, preexec_fn, close_fds, pass_fds, cwd, env, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, unused_restore_signals, unused_gid, unused_gids, unused_uid, unused_umask, unused_start_new_session, unused_process_group)\u001b[39m\n\u001b[32m 1537\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m-> \u001b[39m\u001b[32m1538\u001b[39m hp, ht, pid, tid = \u001b[43m_winapi\u001b[49m\u001b[43m.\u001b[49m\u001b[43mCreateProcess\u001b[49m\u001b[43m(\u001b[49m\u001b[43mexecutable\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1539\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# no special security\u001b[39;49;00m\n\u001b[32m 1540\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[32m 1541\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;28;43mint\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;129;43;01mnot\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mclose_fds\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1542\u001b[39m \u001b[43m \u001b[49m\u001b[43mcreationflags\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1543\u001b[39m \u001b[43m \u001b[49m\u001b[43menv\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1544\u001b[39m \u001b[43m \u001b[49m\u001b[43mcwd\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 1545\u001b[39m \u001b[43m \u001b[49m\u001b[43mstartupinfo\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1546\u001b[39m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[32m 1547\u001b[39m \u001b[38;5;66;03m# Child is launched. Close the parent's copy of those pipe\u001b[39;00m\n\u001b[32m 1548\u001b[39m \u001b[38;5;66;03m# handles that only the child should have open. 
You need\u001b[39;00m\n\u001b[32m (...)\u001b[39m\u001b[32m 1551\u001b[39m \u001b[38;5;66;03m# pipe will not close when the child process exits and the\u001b[39;00m\n\u001b[32m 1552\u001b[39m \u001b[38;5;66;03m# ReadFile will hang.\u001b[39;00m\n", + "\u001b[31mFileNotFoundError\u001b[39m: [WinError 2] The system cannot find the file specified", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[31mTesseractNotFoundError\u001b[39m Traceback (most recent call last)", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[32]\u001b[39m\u001b[32m, line 13\u001b[39m, in \u001b[36mextract_images_from_pdf\u001b[39m\u001b[34m(pdf_stream)\u001b[39m\n\u001b[32m 12\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m---> \u001b[39m\u001b[32m13\u001b[39m elements = \u001b[43mpartition_pdf\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 14\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# filename=str(pdf_path), # partition_pdf might expect a string\u001b[39;49;00m\n\u001b[32m 15\u001b[39m \u001b[43m \u001b[49m\u001b[43mfile\u001b[49m\u001b[43m=\u001b[49m\u001b[43mpdf_stream\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# 'file=', inplace of 'filename'\u001b[39;49;00m\n\u001b[32m 16\u001b[39m \u001b[43m \u001b[49m\u001b[43mstrategy\u001b[49m\u001b[43m=\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mhi_res\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[32m 17\u001b[39m \u001b[43m \u001b[49m\u001b[43mextract_image_block_types\u001b[49m\u001b[43m=\u001b[49m\u001b[43m[\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mImage\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 18\u001b[39m \u001b[43m \u001b[49m\u001b[43mhi_res_model_name\u001b[49m\u001b[43m=\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43myolox\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[32m 19\u001b[39m \u001b[43m \u001b[49m\u001b[43mextract_image_block_to_payload\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[32m 20\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 21\u001b[39m \u001b[38;5;28mprint\u001b[39m(\u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[33mELEMENTS\u001b[39m\u001b[33m\"\u001b[39m)\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\unstructured\\documents\\elements.py:237\u001b[39m, in \u001b[36mprocess_metadata..decorator..wrapper\u001b[39m\u001b[34m(*args, **kwargs)\u001b[39m\n\u001b[32m 235\u001b[39m \u001b[38;5;129m@wraps\u001b[39m(func)\n\u001b[32m 236\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34mwrapper\u001b[39m(*args, **kwargs):\n\u001b[32m--> \u001b[39m\u001b[32m237\u001b[39m elements = \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 238\u001b[39m sig = inspect.signature(func)\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\unstructured\\file_utils\\filetype.py:630\u001b[39m, in \u001b[36madd_metadata_with_filetype..decorator..wrapper\u001b[39m\u001b[34m(*args, **kwargs)\u001b[39m\n\u001b[32m 628\u001b[39m \u001b[38;5;129m@wraps\u001b[39m(func)\n\u001b[32m 629\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m 
\u001b[39m\u001b[34mwrapper\u001b[39m(*args, **kwargs):\n\u001b[32m--> \u001b[39m\u001b[32m630\u001b[39m elements = \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 631\u001b[39m sig = inspect.signature(func)\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\unstructured\\partition\\pdf.py:95\u001b[39m, in \u001b[36mpartition_pdf\u001b[39m\u001b[34m(filename, file, include_page_breaks, strategy, infer_table_structure, ocr_languages, max_partition, min_partition, include_metadata, metadata_filename, metadata_last_modified, **kwargs)\u001b[39m\n\u001b[32m 94\u001b[39m exactly_one(filename=filename, file=file)\n\u001b[32m---> \u001b[39m\u001b[32m95\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mpartition_pdf_or_image\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 96\u001b[39m \u001b[43m \u001b[49m\u001b[43mfilename\u001b[49m\u001b[43m=\u001b[49m\u001b[43mfilename\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 97\u001b[39m \u001b[43m \u001b[49m\u001b[43mfile\u001b[49m\u001b[43m=\u001b[49m\u001b[43mfile\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 98\u001b[39m \u001b[43m \u001b[49m\u001b[43minclude_page_breaks\u001b[49m\u001b[43m=\u001b[49m\u001b[43minclude_page_breaks\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 99\u001b[39m \u001b[43m \u001b[49m\u001b[43mstrategy\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstrategy\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 100\u001b[39m \u001b[43m \u001b[49m\u001b[43minfer_table_structure\u001b[49m\u001b[43m=\u001b[49m\u001b[43minfer_table_structure\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 101\u001b[39m \u001b[43m \u001b[49m\u001b[43mocr_languages\u001b[49m\u001b[43m=\u001b[49m\u001b[43mocr_languages\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 102\u001b[39m \u001b[43m \u001b[49m\u001b[43mmax_partition\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmax_partition\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 103\u001b[39m \u001b[43m \u001b[49m\u001b[43mmin_partition\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmin_partition\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 104\u001b[39m \u001b[43m \u001b[49m\u001b[43mmetadata_last_modified\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmetadata_last_modified\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 105\u001b[39m \u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 106\u001b[39m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\unstructured\\partition\\pdf.py:201\u001b[39m, in \u001b[36mpartition_pdf_or_image\u001b[39m\u001b[34m(filename, file, is_image, include_page_breaks, strategy, infer_table_structure, ocr_languages, max_partition, min_partition, metadata_last_modified, **kwargs)\u001b[39m\n\u001b[32m 200\u001b[39m \u001b[38;5;28;01mwith\u001b[39;00m warnings.catch_warnings():\n\u001b[32m--> \u001b[39m\u001b[32m201\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_partition_pdf_or_image_with_ocr\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 202\u001b[39m \u001b[43m \u001b[49m\u001b[43mfilename\u001b[49m\u001b[43m=\u001b[49m\u001b[43mfilename\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 203\u001b[39m \u001b[43m \u001b[49m\u001b[43mfile\u001b[49m\u001b[43m=\u001b[49m\u001b[43mfile\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 
204\u001b[39m \u001b[43m \u001b[49m\u001b[43minclude_page_breaks\u001b[49m\u001b[43m=\u001b[49m\u001b[43minclude_page_breaks\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 205\u001b[39m \u001b[43m \u001b[49m\u001b[43mocr_languages\u001b[49m\u001b[43m=\u001b[49m\u001b[43mocr_languages\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 206\u001b[39m \u001b[43m \u001b[49m\u001b[43mis_image\u001b[49m\u001b[43m=\u001b[49m\u001b[43mis_image\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 207\u001b[39m \u001b[43m \u001b[49m\u001b[43mmax_partition\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmax_partition\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 208\u001b[39m \u001b[43m \u001b[49m\u001b[43mmin_partition\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmin_partition\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 209\u001b[39m \u001b[43m \u001b[49m\u001b[43mmetadata_last_modified\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmetadata_last_modified\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mlast_modification_date\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 210\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 211\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m layout_elements\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\unstructured\\utils.py:43\u001b[39m, in \u001b[36mrequires_dependencies..decorator..wrapper\u001b[39m\u001b[34m(*args, **kwargs)\u001b[39m\n\u001b[32m 35\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mImportError\u001b[39;00m(\n\u001b[32m 36\u001b[39m \u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[33mFollowing dependencies are missing: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[33m'\u001b[39m\u001b[33m, \u001b[39m\u001b[33m'\u001b[39m.join(missing_deps)\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m. 
\u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m 37\u001b[39m + (\n\u001b[32m (...)\u001b[39m\u001b[32m 41\u001b[39m ),\n\u001b[32m 42\u001b[39m )\n\u001b[32m---> \u001b[39m\u001b[32m43\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\unstructured\\partition\\pdf.py:479\u001b[39m, in \u001b[36m_partition_pdf_or_image_with_ocr\u001b[39m\u001b[34m(filename, file, include_page_breaks, ocr_languages, is_image, max_partition, min_partition, metadata_last_modified)\u001b[39m\n\u001b[32m 474\u001b[39m metadata = ElementMetadata(\n\u001b[32m 475\u001b[39m filename=filename,\n\u001b[32m 476\u001b[39m page_number=page_number,\n\u001b[32m 477\u001b[39m last_modified=metadata_last_modified,\n\u001b[32m 478\u001b[39m )\n\u001b[32m--> \u001b[39m\u001b[32m479\u001b[39m text = \u001b[43mpytesseract\u001b[49m\u001b[43m.\u001b[49m\u001b[43mimage_to_string\u001b[49m\u001b[43m(\u001b[49m\u001b[43mimage\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m=\u001b[49m\u001b[33;43mf\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43m-l \u001b[39;49m\u001b[33;43m'\u001b[39;49m\u001b[38;5;132;43;01m{\u001b[39;49;00m\u001b[43mocr_languages\u001b[49m\u001b[38;5;132;43;01m}\u001b[39;49;00m\u001b[33;43m'\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[32m 481\u001b[39m _elements = partition_text(\n\u001b[32m 482\u001b[39m text=text,\n\u001b[32m 483\u001b[39m max_partition=max_partition,\n\u001b[32m 484\u001b[39m min_partition=min_partition,\n\u001b[32m 485\u001b[39m )\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\pytesseract\\pytesseract.py:486\u001b[39m, in \u001b[36mimage_to_string\u001b[39m\u001b[34m(image, lang, config, nice, output_type, timeout)\u001b[39m\n\u001b[32m 484\u001b[39m args = [image, \u001b[33m'\u001b[39m\u001b[33mtxt\u001b[39m\u001b[33m'\u001b[39m, lang, config, nice, timeout]\n\u001b[32m--> \u001b[39m\u001b[32m486\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m{\u001b[49m\n\u001b[32m 487\u001b[39m \u001b[43m \u001b[49m\u001b[43mOutput\u001b[49m\u001b[43m.\u001b[49m\u001b[43mBYTES\u001b[49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mlambda\u001b[39;49;00m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mrun_and_get_output\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43m(\u001b[49m\u001b[43margs\u001b[49m\u001b[43m \u001b[49m\u001b[43m+\u001b[49m\u001b[43m \u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 488\u001b[39m \u001b[43m \u001b[49m\u001b[43mOutput\u001b[49m\u001b[43m.\u001b[49m\u001b[43mDICT\u001b[49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mlambda\u001b[39;49;00m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43m{\u001b[49m\u001b[33;43m'\u001b[39;49m\u001b[33;43mtext\u001b[39;49m\u001b[33;43m'\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mrun_and_get_output\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43margs\u001b[49m\u001b[43m)\u001b[49m\u001b[43m}\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 489\u001b[39m \u001b[43m 
\u001b[49m\u001b[43mOutput\u001b[49m\u001b[43m.\u001b[49m\u001b[43mSTRING\u001b[49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mlambda\u001b[39;49;00m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mrun_and_get_output\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43margs\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 490\u001b[39m \u001b[43m\u001b[49m\u001b[43m}\u001b[49m\u001b[43m[\u001b[49m\u001b[43moutput_type\u001b[49m\u001b[43m]\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\pytesseract\\pytesseract.py:489\u001b[39m, in \u001b[36mimage_to_string..\u001b[39m\u001b[34m()\u001b[39m\n\u001b[32m 484\u001b[39m args = [image, \u001b[33m'\u001b[39m\u001b[33mtxt\u001b[39m\u001b[33m'\u001b[39m, lang, config, nice, timeout]\n\u001b[32m 486\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m {\n\u001b[32m 487\u001b[39m Output.BYTES: \u001b[38;5;28;01mlambda\u001b[39;00m: run_and_get_output(*(args + [\u001b[38;5;28;01mTrue\u001b[39;00m])),\n\u001b[32m 488\u001b[39m Output.DICT: \u001b[38;5;28;01mlambda\u001b[39;00m: {\u001b[33m'\u001b[39m\u001b[33mtext\u001b[39m\u001b[33m'\u001b[39m: run_and_get_output(*args)},\n\u001b[32m--> \u001b[39m\u001b[32m489\u001b[39m Output.STRING: \u001b[38;5;28;01mlambda\u001b[39;00m: \u001b[43mrun_and_get_output\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43margs\u001b[49m\u001b[43m)\u001b[49m,\n\u001b[32m 490\u001b[39m }[output_type]()\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\pytesseract\\pytesseract.py:352\u001b[39m, in \u001b[36mrun_and_get_output\u001b[39m\u001b[34m(image, extension, lang, config, nice, timeout, return_bytes)\u001b[39m\n\u001b[32m 342\u001b[39m kwargs = {\n\u001b[32m 343\u001b[39m \u001b[33m'\u001b[39m\u001b[33minput_filename\u001b[39m\u001b[33m'\u001b[39m: input_filename,\n\u001b[32m 344\u001b[39m \u001b[33m'\u001b[39m\u001b[33moutput_filename_base\u001b[39m\u001b[33m'\u001b[39m: temp_name,\n\u001b[32m (...)\u001b[39m\u001b[32m 349\u001b[39m \u001b[33m'\u001b[39m\u001b[33mtimeout\u001b[39m\u001b[33m'\u001b[39m: timeout,\n\u001b[32m 350\u001b[39m }\n\u001b[32m--> \u001b[39m\u001b[32m352\u001b[39m \u001b[43mrun_tesseract\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 353\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m _read_output(\n\u001b[32m 354\u001b[39m \u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mkwargs[\u001b[33m'\u001b[39m\u001b[33moutput_filename_base\u001b[39m\u001b[33m'\u001b[39m]\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;132;01m{\u001b[39;00mextsep\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;132;01m{\u001b[39;00mextension\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m\"\u001b[39m,\n\u001b[32m 355\u001b[39m return_bytes,\n\u001b[32m 356\u001b[39m )\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\pytesseract\\pytesseract.py:280\u001b[39m, in \u001b[36mrun_tesseract\u001b[39m\u001b[34m(input_filename, output_filename_base, extension, lang, config, nice, timeout)\u001b[39m\n\u001b[32m 279\u001b[39m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[32m--> \u001b[39m\u001b[32m280\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m TesseractNotFoundError()\n\u001b[32m 282\u001b[39m \u001b[38;5;28;01mwith\u001b[39;00m timeout_manager(proc, 
timeout) \u001b[38;5;28;01mas\u001b[39;00m error_string:\n", + "\u001b[31mTesseractNotFoundError\u001b[39m: /usr/bin/tesseract is not installed or it's not in your PATH. See README file for more information.", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[31mRuntimeError\u001b[39m Traceback (most recent call last)", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[32]\u001b[39m\u001b[32m, line 23\u001b[39m, in \u001b[36mextract_images_from_pdf\u001b[39m\u001b[34m(pdf_stream)\u001b[39m\n\u001b[32m 22\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[32m---> \u001b[39m\u001b[32m23\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[32m 24\u001b[39m \u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[33m❌ Failed to extract images from PDF: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mstr\u001b[39m(e)\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m\"\u001b[39m)\n\u001b[32m 26\u001b[39m file_elements = [element.to_dict() \u001b[38;5;28;01mfor\u001b[39;00m element \u001b[38;5;129;01min\u001b[39;00m elements]\n", + "\u001b[31mRuntimeError\u001b[39m: ❌ Failed to extract images from PDF: /usr/bin/tesseract is not installed or it's not in your PATH. See README file for more information.", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[31mRuntimeError\u001b[39m Traceback (most recent call last)", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[37]\u001b[39m\u001b[32m, line 1\u001b[39m\n\u001b[32m----> \u001b[39m\u001b[32m1\u001b[39m output_path = \u001b[43mextract_images_from_pdf\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpdf_stream\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 2\u001b[39m project_folder= \u001b[33mr\u001b[39m\u001b[33m\"\u001b[39m\u001b[33mD:\u001b[39m\u001b[33m\\\u001b[39m\u001b[33mDEV PATEL\u001b[39m\u001b[33m\\\u001b[39m\u001b[33m2025\u001b[39m\u001b[33m\\\u001b[39m\u001b[33mscratch_VLM\u001b[39m\u001b[33m\\\u001b[39m\u001b[33mscratch_agent\u001b[39m\u001b[33m\\\u001b[39m\u001b[33mtest_files\u001b[39m\u001b[33m\\\u001b[39m\u001b[33msmall\u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m 3\u001b[39m project_output = similarity_matching(output_path, project_folder)\n", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[32]\u001b[39m\u001b[32m, line 43\u001b[39m, in \u001b[36mextract_images_from_pdf\u001b[39m\u001b[34m(pdf_stream)\u001b[39m\n\u001b[32m 41\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m manipulated_json\n\u001b[32m 42\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[32m---> \u001b[39m\u001b[32m43\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[33m❌ Error in extract_images_from_pdf: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mstr\u001b[39m(e)\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m\"\u001b[39m)\n", + "\u001b[31mRuntimeError\u001b[39m: ❌ Error in extract_images_from_pdf: ❌ Failed to extract images from PDF: /usr/bin/tesseract is not installed or it's not in your PATH. See README file for more information." 
+ ] + } + ], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "193d3d6a", + "metadata": {}, + "outputs": [], + "source": [ + "print(\"project_output\")" + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "id": "586ccfc8", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "384" + ] + }, + "execution_count": 39, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(embeddings)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "877ea667", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "d1cbfb8d", + "metadata": {}, + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "93f17975", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "4fffb016", + "metadata": {}, + "source": [ + "# Resolving the test of the images" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "fe4a1ea6", + "metadata": {}, + "outputs": [], + "source": [ + "import cv2, json,base64,io,os,tempfile,logging, re\n", + "import numpy as np\n", + "from unstructured.partition.pdf import partition_pdf\n", + "from PIL import Image\n", + "# from imutils.perspective import four_point_transform\n", + "from dotenv import load_dotenv\n", + "import pytesseract\n", + "# from werkzeug.utils import secure_filename\n", + "# from langchain_groq import ChatGroq\n", + "# from langgraph.prebuilt import create_react_agent\n", + "from pdf2image import convert_from_path, convert_from_bytes\n", + "from concurrent.futures import ThreadPoolExecutor\n", + "from pdf2image.exceptions import PDFInfoNotInstalledError\n", + "from typing import Dict, TypedDict, Optional, Any\n", + "from langgraph.graph import StateGraph, END\n", + "import uuid\n", + "import shutil, time, functools\n", + "# from langchain_experimental.open_clip.open_clip import OpenCLIPEmbeddings\n", + "# from langchain_core.utils.utils import secret_from_env\n", + "# from matplotlib.offsetbox import OffsetImage, AnnotationBbox\n", + "from io import BytesIO\n", + "from pathlib import Path\n", + "import os\n", + "#from utils.block_relation_builder import block_builder, separate_scripts, transform_logic_to_action_flow, analyze_opcode_counts\n", + "# from langchain.chat_models import ChatOpenAI\n", + "# from langchain_openai import ChatOpenAI\n", + "from pydantic import Field, SecretStr\n", + "from difflib import get_close_matches\n", + "# pytesseract.pytesseract.tesseract_cmd = (r'/usr/bin/tesseract')\n", + "import os\n", + "import pytesseract\n", + "\n", + "# Explicitly set the path to the Tesseract executable\n", + "# Use a raw string (r'...') to handle backslashes correctly\n", + "pytesseract.pytesseract.tesseract_cmd = r'C:\\Program Files\\Tesseract-OCR\\tesseract.exe'\n", + "\n", + "# Explicitly set the path to the Tesseract language data files\n", + "# This is the path to the 'tessdata' folder itself\n", + "os.environ['TESSDATA_PREFIX'] = r'C:\\Program Files\\Tesseract-OCR\\tessdata'" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "dd0aec53", + "metadata": {}, + "outputs": [], + "source": [ + "from pathlib import Path\n", + "import os\n", + "\n", + "BASE_DIR = Path(r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\")\n", + "BLOCKS_DIR = BASE_DIR / \"blocks\"\n", + "STATIC_DIR = BASE_DIR / \"static\"\n", + "GEN_PROJECT_DIR = BASE_DIR / \"generated_projects\"\n", + "BACKDROP_DIR = BLOCKS_DIR / \"Backdrops\"\n", + "SPRITE_DIR = 
BLOCKS_DIR / \"sprites\"\n", + "CODE_BLOCKS_DIR = BLOCKS_DIR / \"code_blocks\"\n", + "OUTPUT_DIR = BASE_DIR / \"outputs\"\n", + "\n", + "for d in (\n", + " BLOCKS_DIR,\n", + " STATIC_DIR,\n", + " GEN_PROJECT_DIR,\n", + " BACKDROP_DIR,\n", + " SPRITE_DIR,\n", + " CODE_BLOCKS_DIR,\n", + " OUTPUT_DIR,\n", + " # DETECTED_IMAGE_DIR,\n", + " # SCANNED_IMAGE_DIR,\n", + " # JSON_DIR,\n", + "):\n", + " d.mkdir(parents=True, exist_ok=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bb55e101", + "metadata": {}, + "outputs": [], + "source": [ + "import io\n", + "\n", + "# Replace this with the actual path to your PDF file\n", + "pdf_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\New folder\\c05f433d840a416580b513835a6bfcf2.pdf\"\n", + "\n", + "# Open and read the PDF file as bytes, then wrap it in a BytesIO stream\n", + "with open(pdf_path, \"rb\") as pdf_file:\n", + " pdf_bytes = pdf_file.read()\n", + " pdf_stream = io.BytesIO(pdf_bytes)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "a10be0b4", + "metadata": {}, + "outputs": [], + "source": [ + "# !pip install huggingface_hub[hf_xet]" + ] + }, + { + "cell_type": "markdown", + "id": "7bc1da3e", + "metadata": {}, + "source": [ + "# Working on the embedding flow for the result getting" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "516ff9d8", + "metadata": {}, + "outputs": [], + "source": [ + "import uuid\n", + "import shutil\n", + "import tempfile\n", + "from langchain_experimental.open_clip.open_clip import OpenCLIPEmbeddings\n", + "from matplotlib.offsetbox import OffsetImage, AnnotationBbox\n", + "from io import BytesIO\n", + "import logging\n", + "from dotenv import load_dotenv\n", + "import os\n", + "from langchain_groq import ChatGroq\n", + "import json\n", + "\n", + "\n", + "# ============================== #\n", + "# DEFINE PATHS #\n", + "# ============================== #\n", + "backdrop_images_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks_v1\\Backdrops\"\n", + "sprite_images_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks_v1\\sprites\"\n", + "code_blocks_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks_v1\\code_blocks\"\n", + "image_dirs = [backdrop_images_path, sprite_images_path, code_blocks_path]\n", + " \n", + "# ============================== #\n", + "# INITIALIZE CLIP EMBEDDER #\n", + "# ============================== #\n", + "clip_embd = OpenCLIPEmbeddings()\n", + " \n", + "# ========================================= #\n", + "# Walk folders to collect all image paths #\n", + "# ========================================= #\n", + "folder_image_paths = []\n", + "for image_dir in image_dirs:\n", + " for root, _, files in os.walk(image_dir):\n", + " for fname in files:\n", + " if fname.lower().endswith((\".png\", \".jpg\", \".jpeg\")):\n", + " folder_image_paths.append(os.path.join(root, fname))\n", + " \n", + "# # ============================== #\n", + "# EMBED FOLDER IMAGES (REF) #\n", + "# ============================== #\n", + "img_features = clip_embd.embed_image(folder_image_paths)\n", + " \n", + "# ============================== #\n", + "# Store image embeddings #\n", + "# ============================== #\n", + "embedding_json = []\n", + "for i, path in enumerate(folder_image_paths):\n", + " embedding_json.append({\n", + " \"name\":os.path.basename(path),\n", + " \"file-path\": path,\n", + " \"embeddings\": list(img_features[i])\n", + " })\n", + " \n", + "# Save to embeddings.json\n", + "with 
open(r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_v3\\Scratch_Vision_Game\\blocks\\openclip_embeddings.json\", \"w\") as f:\n", + " json.dump(embedding_json, f, indent=2)" + ] + }, + { + "cell_type": "markdown", + "id": "050d51ee", + "metadata": {}, + "source": [ + "## v2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a5869443", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loading CLIP model 'clip-ViT-L-14'...\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "c:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\huggingface_hub\\file_download.py:143: UserWarning: `huggingface_hub` cache-system uses symlinks by default to efficiently store duplicated files but your machine does not support them in C:\\Users\\Admin\\.cache\\huggingface\\hub\\models--sentence-transformers--clip-ViT-L-14. Caching files will still work but in a degraded version that might require more space on your disk. This warning can be disabled by setting the `HF_HUB_DISABLE_SYMLINKS_WARNING` environment variable. For more details, see https://huggingface.co/docs/huggingface_hub/how-to-cache#limitations.\n", + "To support symlinks on Windows, you either need to activate Developer Mode or to run Python as an administrator. In order to activate developer mode, see this article: https://docs.microsoft.com/en-us/windows/apps/get-started/enable-your-device-for-development\n", + " warnings.warn(message)\n", + "Using a slow image processor as `use_fast` is unset and a slow processor was saved with this model. `use_fast=True` will be the default behavior in v4.52, even if the model was saved with a slow processor. This will result in minor differences in outputs. You'll still be able to use a slow processor with `use_fast=False`.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Model loaded successfully.\n", + "Found 993 images. 
Generating embeddings...\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Batches: 100%|██████████| 32/32 [00:39<00:00, 1.23s/it]" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Embeddings generated.\n", + "Creating FAISS index with dimension 768...\n", + "FAISS index saved to D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_v3\\Scratch_Vision_Game\\blocks\\faiss_index.bin\n", + "Image paths saved to D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_v3\\Scratch_Vision_Game\\blocks\\image_paths.json\n", + "Embedding creation and storage process complete.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "# import os\n", + "# import json\n", + "# import numpy as np\n", + "# import faiss\n", + "# from sentence_transformers import SentenceTransformer\n", + "# from PIL import Image\n", + "\n", + "# # ============================== #\n", + "# # DEFINE PATHS #\n", + "# # ============================== #\n", + "# backdrop_images_path = \"D:\\\\DEV PATEL\\\\2025\\\\scratch_VLM\\\\scratch_v3\\\\Scratch_Vision_Game\\\\blocks\\\\Backdrops\"\n", + "# sprite_images_path = \"D:\\\\DEV PATEL\\\\2025\\\\scratch_VLM\\\\scratch_v3\\\\Scratch_Vision_Game\\\\blocks\\\\sprites\"\n", + "# code_blocks_path = \"D:\\\\DEV PATEL\\\\2025\\\\scratch_VLM\\\\scratch_v3\\\\Scratch_Vision_Game\\\\blocks\\\\code_blocks\"\n", + "# output_dir = \"D:\\\\DEV PATEL\\\\2025\\\\scratch_VLM\\\\scratch_v3\\\\Scratch_Vision_Game\\\\blocks\"\n", + "\n", + "# image_dirs = [backdrop_images_path, sprite_images_path, code_blocks_path]\n", + "\n", + "# # ======================================= #\n", + "# # INITIALIZE CLIP EMBEDDER (L-14) #\n", + "# # ======================================= #\n", + "# # Using the more powerful clip-ViT-L-14 model as discussed.\n", + "# print(\"Loading CLIP model 'clip-ViT-L-14'...\")\n", + "# model = SentenceTransformer('clip-ViT-L-14')\n", + "# print(\"Model loaded successfully.\")\n", + "\n", + "# # ========================================= #\n", + "# # Walk folders to collect all image paths #\n", + "# # ========================================= #\n", + "# folder_image_paths = []\n", + "# for image_dir in image_dirs:\n", + "# for root, _, files in os.walk(image_dir):\n", + "# for fname in files:\n", + "# if fname.lower().endswith((\".png\", \".jpg\", \".jpeg\")):\n", + "# folder_image_paths.append(os.path.join(root, fname))\n", + "\n", + "# if not folder_image_paths:\n", + "# print(\"No images found in the specified directories. Exiting.\")\n", + "# exit()\n", + "\n", + "# print(f\"Found {len(folder_image_paths)} images. Generating embeddings...\")\n", + "\n", + "# # ============================== #\n", + "# # EMBED FOLDER IMAGES #\n", + "# # ============================== #\n", + "# # This uses the SentenceTransformer to encode all images into embeddings.\n", + "# # The `convert_image_to_features` is a helper function to prepare the data.\n", + "# img_features = model.encode(folder_image_paths, batch_size=32, show_progress_bar=True)\n", + "# print(\"Embeddings generated.\")\n", + "\n", + "# # ============================== #\n", + "# # CREATE AND STORE FAISS INDEX #\n", + "# # ============================== #\n", + "# # The embedding vector dimension is the second element of the shape.\n", + "# # For clip-ViT-L-14, this is 768.\n", + "# embedding_dim = img_features.shape[1]\n", + "\n", + "# # Create a FAISS index. 
IndexFlatL2 is a simple and fast exact search index.\n", + "# # The \"L2\" stands for L2 distance (Euclidean distance), which is a common metric for similarity.\n", + "# print(f\"Creating FAISS index with dimension {embedding_dim}...\")\n", + "# index = faiss.IndexFlatL2(embedding_dim)\n", + "\n", + "# # Add the generated embeddings to the index.\n", + "# # FAISS requires the data to be in float32.\n", + "# index.add(img_features.astype(np.float32))\n", + "\n", + "# # Save the FAISS index to a file for later use.\n", + "# index_path = os.path.join(output_dir, \"faiss_index.bin\")\n", + "# faiss.write_index(index, index_path)\n", + "# print(f\"FAISS index saved to {index_path}\")\n", + "\n", + "# # ===================================== #\n", + "# # SAVE IMAGE PATHS (METADATA) #\n", + "# # ===================================== #\n", + "# # We need to save the paths separately because the FAISS index only stores vectors.\n", + "# # The order of the paths must match the order of the embeddings added to the index.\n", + "# paths_json_path = os.path.join(output_dir, \"image_paths.json\")\n", + "# with open(paths_json_path, \"w\") as f:\n", + "# json.dump(folder_image_paths, f, indent=2)\n", + "\n", + "# print(f\"Image paths saved to {paths_json_path}\")\n", + "# print(\"Embedding creation and storage process complete.\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8fd0c8fa", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loading CLIP model 'clip-ViT-L-14'...\n", + "Model loaded successfully.\n", + "Found 715 images. Loading them into memory...\n", + "Images loaded. Generating embeddings...\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Batches: 100%|██████████| 23/23 [06:00<00:00, 15.69s/it]" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Embeddings generated.\n", + "Creating FAISS index with dimension 768...\n", + "FAISS index saved to D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\faiss_index.bin\n", + "Image paths saved to D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\image_paths.json\n", + "Embedding creation and storage process complete.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "# import os\n", + "# import json\n", + "# import numpy as np\n", + "# import faiss\n", + "# from sentence_transformers import SentenceTransformer\n", + "# from PIL import Image\n", + "\n", + "# # ============================== #\n", + "# # DEFINE PATHS #\n", + "# # ============================== #\n", + "# backdrop_images_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks_v1\\Backdrops\"\n", + "# sprite_images_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks_v1\\sprites\"\n", + "# code_blocks_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks_v1\\code_blocks\"\n", + "# output_dir = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\"\n", + "\n", + "# image_dirs = [backdrop_images_path, sprite_images_path, code_blocks_path]\n", + "\n", + "# # ======================================= #\n", + "# # INITIALIZE CLIP EMBEDDER (L-14) #\n", + "# # ======================================= #\n", + "# print(\"Loading CLIP model 'clip-ViT-L-14'...\")\n", + "# model = SentenceTransformer('clip-ViT-L-14')\n", + "# print(\"Model loaded successfully.\")\n", + "\n", + "# # ========================================= #\n", + "# # Walk folders to collect all 
image paths #\n", + "# # ========================================= #\n", + "# folder_image_paths = []\n", + "# for image_dir in image_dirs:\n", + "# for root, _, files in os.walk(image_dir):\n", + "# for fname in files:\n", + "# if fname.lower().endswith((\".png\", \".jpg\", \".jpeg\")):\n", + "# folder_image_paths.append(os.path.join(root, fname))\n", + "\n", + "# if not folder_image_paths:\n", + "# print(\"No images found in the specified directories. Exiting.\")\n", + "# exit()\n", + "\n", + "# print(f\"Found {len(folder_image_paths)} images. Loading them into memory...\")\n", + "\n", + "# # ================================== #\n", + "# # LOAD IMAGES INTO PIL OBJECTS #\n", + "# # ================================== #\n", + "# # This is the key change. We load the images first.\n", + "# # images_to_embed = []\n", + "# # for path in folder_image_paths:\n", + "# # try:\n", + "# # images_to_embed.append(Image.open(path))\n", + "# # except Exception as e:\n", + "# # print(f\"Error loading image {path}: {e}\")\n", + "# # # Skip this image if it cannot be loaded\n", + "# # pass\n", + "# images_to_embed = []\n", + "# for path in folder_image_paths:\n", + "# try:\n", + "# # Open the original image\n", + "# original_img = Image.open(path)\n", + "\n", + "# # Create a new image with a white background\n", + "# # The new image should be in 'RGB' mode to not have transparency\n", + "# final_img = Image.new(\"RGB\", original_img.size, (255, 255, 255))\n", + "\n", + "# # Paste the original image onto the white background.\n", + "# # If the original has transparency (an alpha channel), it will be used as a mask.\n", + "# # If it doesn't, it will just be pasted as is.\n", + "# final_img.paste(original_img, mask=original_img.split()[-1] if original_img.mode == 'RGBA' else None)\n", + " \n", + "# images_to_embed.append(final_img)\n", + "\n", + "# except Exception as e:\n", + "# print(f\"Error loading and processing image {path}: {e}\")\n", + "# # Skip this image if it cannot be loaded or processed\n", + "# pass\n", + "\n", + "# print(\"Images loaded. 
Generating embeddings...\")\n", + "\n", + "# # ============================== #\n", + "# # EMBED FOLDER IMAGES #\n", + "# # ============================== #\n", + "# # Now we pass the list of PIL Image objects directly to the model.encode function.\n", + "# img_features = model.encode(images_to_embed, batch_size=32, show_progress_bar=True)\n", + "# print(\"Embeddings generated.\")\n", + "\n", + "# # ============================== #\n", + "# # CREATE AND STORE FAISS INDEX #\n", + "# # ============================== #\n", + "# embedding_dim = img_features.shape[1]\n", + "\n", + "# print(f\"Creating FAISS index with dimension {embedding_dim}...\")\n", + "# index = faiss.IndexFlatL2(embedding_dim)\n", + "\n", + "# index.add(img_features.astype(np.float32))\n", + "\n", + "# index_path = os.path.join(output_dir, \"faiss_index.bin\")\n", + "# faiss.write_index(index, index_path)\n", + "# print(f\"FAISS index saved to {index_path}\")\n", + "\n", + "# # ===================================== #\n", + "# # SAVE IMAGE PATHS (METADATA) #\n", + "# # ===================================== #\n", + "# paths_json_path = os.path.join(output_dir, \"image_paths.json\")\n", + "# with open(paths_json_path, \"w\") as f:\n", + "# json.dump(folder_image_paths, f, indent=2)\n", + "\n", + "# print(f\"Image paths saved to {paths_json_path}\")\n", + "# print(\"Embedding creation and storage process complete.\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "95328787", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loading CLIP model 'clip-ViT-L-14'...\n", + "Model loaded successfully.\n", + "Found 715 images. Loading them into memory...\n", + "Images loaded. Generating embeddings...\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Batches: 100%|██████████| 23/23 [05:54<00:00, 15.42s/it]" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Embeddings generated.\n", + "Creating FAISS index with dimension 768...\n", + "FAISS index saved to D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\faiss_index.bin\n", + "Image paths saved to D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\image_paths.json\n", + "Embedding creation and storage process complete.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "import os\n", + "import json\n", + "import numpy as np\n", + "import faiss\n", + "from sentence_transformers import SentenceTransformer\n", + "from PIL import Image\n", + "\n", + "# ============================== #\n", + "# DEFINE PATHS #\n", + "# ============================== #\n", + "backdrop_images_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks_v1\\Backdrops\"\n", + "sprite_images_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks_v1\\sprites\"\n", + "code_blocks_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks_v1\\code_blocks\"\n", + "output_dir = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\"\n", + "\n", + "image_dirs = [backdrop_images_path, sprite_images_path, code_blocks_path]\n", + "\n", + "# ======================================= #\n", + "# INITIALIZE CLIP EMBEDDER (L-14) #\n", + "# ======================================= #\n", + "print(\"Loading CLIP model 'clip-ViT-L-14'...\")\n", + "model = SentenceTransformer('clip-ViT-L-14')\n", + "print(\"Model loaded successfully.\")\n", + "\n", + "# ========================================= #\n", + "# Walk 
folders to collect all image paths #\n", + "# ========================================= #\n", + "folder_image_paths = []\n", + "for image_dir in image_dirs:\n", + " for root, _, files in os.walk(image_dir):\n", + " for fname in files:\n", + " if fname.lower().endswith((\".png\", \".jpg\", \".jpeg\")):\n", + " folder_image_paths.append(os.path.join(root, fname))\n", + "\n", + "if not folder_image_paths:\n", + " print(\"No images found in the specified directories. Exiting.\")\n", + " exit()\n", + "\n", + "print(f\"Found {len(folder_image_paths)} images. Loading them into memory...\")\n", + "\n", + "# ================================== #\n", + "# LOAD IMAGES INTO PIL OBJECTS #\n", + "# ================================== #\n", + "# This is the key change. We load the images first.\n", + "# images_to_embed = []\n", + "# for path in folder_image_paths:\n", + "# try:\n", + "# images_to_embed.append(Image.open(path))\n", + "# except Exception as e:\n", + "# print(f\"Error loading image {path}: {e}\")\n", + "# # Skip this image if it cannot be loaded\n", + "# pass\n", + "images_to_embed = []\n", + "for path in folder_image_paths:\n", + " try:\n", + " # Open the original image\n", + " original_img = Image.open(path)\n", + "\n", + " # Create a new image with a white background\n", + " # The new image should be in 'RGB' mode to not have transparency\n", + " final_img = Image.new(\"RGB\", original_img.size, (255, 255, 255))\n", + "\n", + " # Paste the original image onto the white background.\n", + " # If the original has transparency (an alpha channel), it will be used as a mask.\n", + " # If it doesn't, it will just be pasted as is.\n", + " final_img.paste(original_img, mask=original_img.split()[-1] if original_img.mode == 'RGBA' else None)\n", + " \n", + " images_to_embed.append(final_img)\n", + "\n", + " except Exception as e:\n", + " print(f\"Error loading and processing image {path}: {e}\")\n", + " # Skip this image if it cannot be loaded or processed\n", + " pass\n", + "\n", + "print(\"Images loaded. 
Generating embeddings...\")\n", + "\n", + "# ============================== #\n", + "# EMBED FOLDER IMAGES #\n", + "# ============================== #\n", + "img_features = model.encode(images_to_embed, batch_size=32, show_progress_bar=True)\n", + "print(\"Embeddings generated.\")\n", + "\n", + "# ============================== #\n", + "# NORMALIZE VECTORS FOR IP #\n", + "# ============================== #\n", + "# Normalize the vectors to unit length for accurate cosine similarity\n", + "faiss.normalize_L2(img_features)\n", + "\n", + "# ============================== #\n", + "# CREATE AND STORE FAISS INDEX #\n", + "# ============================== #\n", + "embedding_dim = img_features.shape[1]\n", + "\n", + "print(f\"Creating FAISS index with dimension {embedding_dim}...\")\n", + "# Use an Inner Product (IP) index for cosine similarity\n", + "index = faiss.IndexFlatIP(embedding_dim)\n", + "\n", + "index.add(img_features.astype(np.float32))\n", + "\n", + "index_path = os.path.join(output_dir, \"faiss_index.bin\")\n", + "faiss.write_index(index, index_path)\n", + "print(f\"FAISS index saved to {index_path}\")\n", + "\n", + "# ===================================== #\n", + "# SAVE IMAGE PATHS (METADATA) #\n", + "# ===================================== #\n", + "paths_json_path = os.path.join(output_dir, \"image_paths.json\")\n", + "with open(paths_json_path, \"w\") as f:\n", + " json.dump(folder_image_paths, f, indent=2)\n", + "\n", + "print(f\"Image paths saved to {paths_json_path}\")\n", + "print(\"Embedding creation and storage process complete.\")" + ] + }, + { + "cell_type": "markdown", + "id": "587c4621", + "metadata": {}, + "source": [ + "## little fast indexing" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "74d0d40a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loading CLIP model 'clip-ViT-L-14'...\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using a slow image processor as `use_fast` is unset and a slow processor was saved with this model. `use_fast=True` will be the default behavior in v4.52, even if the model was saved with a slow processor. This will result in minor differences in outputs. You'll still be able to use a slow processor with `use_fast=False`.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Model loaded successfully.\n", + "Found 715 images. Loading them into memory...\n", + "Images loaded. 
Generating embeddings...\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Batches: 100%|██████████| 23/23 [06:10<00:00, 16.10s/it]" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Embeddings generated.\n", + "Creating FAISS index with dimension 768...\n", + "FAISS index saved to D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\faiss_index.bin\n", + "Image paths saved to D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\image_paths.json\n", + "Embedding creation and storage process complete.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "# import os\n", + "# import json\n", + "# import numpy as np\n", + "# import faiss\n", + "# from sentence_transformers import SentenceTransformer\n", + "# from PIL import Image\n", + "\n", + "# # ============================== #\n", + "# # DEFINE PATHS #\n", + "# # ============================== #\n", + "# backdrop_images_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks_v1\\Backdrops\"\n", + "# sprite_images_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks_v1\\sprites\"\n", + "# code_blocks_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks_v1\\code_blocks\"\n", + "# output_dir = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\"\n", + "\n", + "# image_dirs = [backdrop_images_path, sprite_images_path, code_blocks_path]\n", + "\n", + "# # ======================================= #\n", + "# # INITIALIZE CLIP EMBEDDER (L-14) #\n", + "# # ======================================= #\n", + "# print(\"Loading CLIP model 'clip-ViT-L-14'...\")\n", + "# model = SentenceTransformer('clip-ViT-L-14')\n", + "# print(\"Model loaded successfully.\")\n", + "\n", + "# # ========================================= #\n", + "# # Walk folders to collect all image paths #\n", + "# # ========================================= #\n", + "# folder_image_paths = []\n", + "# for image_dir in image_dirs:\n", + "# for root, _, files in os.walk(image_dir):\n", + "# for fname in files:\n", + "# if fname.lower().endswith((\".png\", \".jpg\", \".jpeg\")):\n", + "# folder_image_paths.append(os.path.join(root, fname))\n", + "\n", + "# if not folder_image_paths:\n", + "# print(\"No images found in the specified directories. Exiting.\")\n", + "# exit()\n", + "\n", + "# print(f\"Found {len(folder_image_paths)} images. Loading them into memory...\")\n", + "\n", + "# # ================================== #\n", + "# # LOAD IMAGES INTO PIL OBJECTS #\n", + "# # ================================== #\n", + "# images_to_embed = []\n", + "# for path in folder_image_paths:\n", + "# try:\n", + "# original_img = Image.open(path)\n", + "# final_img = Image.new(\"RGB\", original_img.size, (255, 255, 255))\n", + "# final_img.paste(original_img, mask=original_img.split()[-1] if original_img.mode == 'RGBA' else None)\n", + "# images_to_embed.append(final_img)\n", + "\n", + "# except Exception as e:\n", + "# print(f\"Error loading and processing image {path}: {e}\")\n", + "# pass\n", + "\n", + "# print(\"Images loaded. 
Generating embeddings...\")\n", + "\n", + "# # ============================== #\n", + "# # EMBED FOLDER IMAGES #\n", + "# # ============================== #\n", + "# img_features = model.encode(images_to_embed, batch_size=32, show_progress_bar=True)\n", + "# print(\"Embeddings generated.\")\n", + "\n", + "# # ============================== #\n", + "# # NORMALIZE VECTORS FOR IP #\n", + "# # ============================== #\n", + "# # Normalize the vectors to unit length for accurate cosine similarity\n", + "# faiss.normalize_L2(img_features)\n", + "\n", + "# # ============================== #\n", + "# # CREATE AND STORE FAISS INDEX #\n", + "# # ============================== #\n", + "# embedding_dim = img_features.shape[1]\n", + "# print(f\"Creating FAISS index with dimension {embedding_dim}...\")\n", + "\n", + "# # ----------------- CODE UPDATE START (IndexHNSWFlat) -----------------\n", + "\n", + "# # HNSW (Hierarchical Navigable Small World) uses a graph structure.\n", + "# # The primary parameter to tune is 'M', the number of neighbors each node connects to.\n", + "# # A larger 'M' increases accuracy and memory usage. A common starting value is 32 or 64.\n", + "# # We will also use `faiss.METRIC_INNER_PRODUCT` to ensure the index uses inner product,\n", + "# # which is equivalent to cosine similarity after L2 normalization.\n", + "# M = 32\n", + "# index = faiss.IndexHNSWFlat(embedding_dim, M, faiss.METRIC_INNER_PRODUCT)\n", + "\n", + "# # IndexHNSWFlat does not require a separate training step like IndexIVFFlat.\n", + "# # The graph is built as vectors are added.\n", + "# # You can optionally tune 'efConstruction', the exploration depth during index building.\n", + "# # A higher value leads to a more accurate graph but takes longer to build.\n", + "# index.hnsw.efConstruction = 40\n", + "\n", + "# # Add the vectors to the index.\n", + "# index.add(img_features.astype(np.float32))\n", + "\n", + "# # ----------------- CODE UPDATE END -----------------\n", + "\n", + "# index_path = os.path.join(output_dir, \"faiss_index.bin\")\n", + "# faiss.write_index(index, index_path)\n", + "# print(f\"FAISS index saved to {index_path}\")\n", + "\n", + "# # ===================================== #\n", + "# # SAVE IMAGE PATHS (METADATA) #\n", + "# # ===================================== #\n", + "# paths_json_path = os.path.join(output_dir, \"image_paths.json\")\n", + "# with open(paths_json_path, \"w\") as f:\n", + "# json.dump(folder_image_paths, f, indent=2)\n", + "\n", + "# print(f\"Image paths saved to {paths_json_path}\")\n", + "# print(\"Embedding creation and storage process complete.\")\n", + "\n", + "# # ----------------- SEARCH CODE UPDATE START (Optional) -----------------\n", + "# # For search, you can set 'efSearch', the exploration depth during search.\n", + "# # Higher 'efSearch' values result in more accurate but slower searches.\n", + "# # A good starting point is to set it slightly higher than 'k'.\n", + "# # This is a runtime parameter, so it can be changed on the fly.\n", + "# #\n", + "# # Load the index\n", + "# # index = faiss.read_index(index_path)\n", + "# # ...\n", + "# # index.hnsw.efSearch = 20 # for a search with k=5\n", + "# # distances, indices = index.search(query_embedding, k)\n", + "# #\n", + "# # The rest of the search code remains the same.\n", + "# # ----------------- SEARCH CODE UPDATE END -----------------\n", + " \n", + "# # This video provides a great overview of how to choose the right FAISS index for your specific use case. 
[Choosing the Right FAISS Index](https://www.youtube.com/watch?v=B7wmo_NImgM).\n", + "# # http://googleusercontent.com/youtube_content/2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "085b7869", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loading CLIP model 'clip-ViT-L-14'...\n", + "Loading FAISS index and image paths...\n", + "Encoding query: 'D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\test_samp\\figure-1-5.jpg'...\n", + "Searching for the top 5 similar images...\n", + "\n", + "--- Search Results ---\n", + "Rank 1:\n", + " Path: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Circles.sb3\\c9847be305920807c5597d81576dd0c4.png\n", + " Distance: 220.9455\n", + "--------------------\n", + "Rank 2:\n", + " Path: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Soccer Ball.sprite3\\5d973d7a3a8be3f3bd6e1cd0f73c32b5.png\n", + " Distance: 224.2444\n", + "--------------------\n", + "Rank 3:\n", + " Path: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Light.sb3\\4b98c07876ed8997c3762e75790507b4.png\n", + " Distance: 225.5092\n", + "--------------------\n", + "Rank 4:\n", + " Path: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Soccer Ball.sprite3\\cat_football.png\n", + " Distance: 227.2209\n", + "--------------------\n", + "Rank 5:\n", + " Path: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Soccer.sb3\\04a63154f04b09494354090f7cc2f1b9.png\n", + " Distance: 233.6558\n", + "--------------------\n", + "Search complete.\n" + ] + } + ], + "source": [ + "import os\n", + "import json\n", + "import faiss\n", + "import numpy as np\n", + "from sentence_transformers import SentenceTransformer\n", + "from PIL import Image\n", + "\n", + "# ============================== #\n", + "# DEFINE PATHS #\n", + "# ============================== #\n", + "index_path = \"D:\\\\DEV PATEL\\\\2025\\\\scratch_VLM\\\\scratch_agent\\\\blocks\\\\faiss_index.bin\"\n", + "paths_json_path = \"D:\\\\DEV PATEL\\\\2025\\\\scratch_VLM\\\\scratch_agent\\\\blocks\\\\image_paths.json\"\n", + "\n", + "# ======================================= #\n", + "# LOAD CLIP MODEL AND FAISS INDEX #\n", + "# ======================================= #\n", + "print(\"Loading CLIP model 'clip-ViT-L-14'...\")\n", + "model = SentenceTransformer('clip-ViT-L-14')\n", + "\n", + "print(\"Loading FAISS index and image paths...\")\n", + "# Load the FAISS index from the file.\n", + "index = faiss.read_index(index_path)\n", + "\n", + "# Load the corresponding image paths.\n", + "with open(paths_json_path, \"r\") as f:\n", + " image_paths = json.load(f)\n", + "\n", + "# =================================== #\n", + "# DEFINE QUERY (TEXT OR IMAGE) #\n", + "# =================================== #\n", + "# You can change this to a text string or a path to an image file.\n", + "# For example:\n", + "# query = \"A black and white cat\"\n", + "# query = \"D:\\\\DEV PATEL\\\\2025\\\\scratch_VLM\\\\scratch_agent\\\\blocks\\\\test_samp\\\\Screenshot 2025-09-15 125111.png\"\n", + "query = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\test_samp\\figure-1-5.jpg\"\n", + "\n", + "# =================================== #\n", + "# ENCODE QUERY TO EMBEDDING #\n", + "# =================================== #\n", + "print(f\"Encoding query: '{query}'...\")\n", + "\n", + "# Check if the query is a file path and, if so, load the image.\n", + "if os.path.exists(query) and 
os.path.isfile(query):\n", + " # This block handles image file queries.\n", + " try:\n", + " # Load the image using PIL (Pillow) and encode it.\n", + " pil_image = Image.open(query)\n", + " # Sentence-Transformers' model.encode can directly handle a PIL Image object.\n", + " query_embedding = model.encode(pil_image, convert_to_tensor=True).cpu().numpy().reshape(1, -1)\n", + " except Exception as e:\n", + " print(f\"Error loading image from path: {e}\")\n", + " query_embedding = None\n", + "else:\n", + " # This block handles text queries.\n", + " query_embedding = model.encode(query, convert_to_tensor=True).cpu().numpy().reshape(1, -1)\n", + "\n", + "# Check if the embedding was successfully created.\n", + "if query_embedding is None:\n", + " print(\"Could not create an embedding for the query. Exiting.\")\n", + " exit()\n", + "\n", + "# Ensure the embedding is in float32 format for FAISS.\n", + "query_embedding = query_embedding.astype(np.float32)\n", + "\n", + "# ============================== #\n", + "# PERFORM FAISS SEARCH #\n", + "# ============================== #\n", + "k = 5 # Number of similar results to retrieve\n", + "print(f\"Searching for the top {k} similar images...\")\n", + "\n", + "# Use the FAISS index to search for the k nearest neighbors.\n", + "distances, indices = index.search(query_embedding, k)\n", + "\n", + "# The 'distances' array contains the L2 distances, and 'indices' contains\n", + "# the indices of the found vectors in the original dataset.\n", + "\n", + "# ============================== #\n", + "# DISPLAY RESULTS #\n", + "# ============================== #\n", + "print(\"\\n--- Search Results ---\")\n", + "for i, idx in enumerate(indices[0]):\n", + " # The 'idx' corresponds to the index in our original image_paths list.\n", + " path = image_paths[idx]\n", + " distance = distances[0][i]\n", + " print(f\"Rank {i+1}:\")\n", + " print(f\" Path: {path}\")\n", + " print(f\" Distance: {distance:.4f}\")\n", + " print(\"-\" * 20)\n", + "\n", + "print(\"Search complete.\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "c725e6e9", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loading CLIP model 'clip-ViT-L-14'...\n", + "Loading FAISS index and image paths...\n", + "Encoding query: 'D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\test_samp\\figure-2-6.jpg'...\n", + "Searching for the top 5 similar images...\n", + "\n", + "--- Search Results ---\n", + "Rank 1:\n", + " Path: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks_v1\\sprites\\Cat.sprite3\\0fb9be3e8397c983338cb71dc84d0b25.png\n", + " Similarity Score: 0.7921\n", + "--------------------\n", + "Rank 2:\n", + " Path: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks_v1\\sprites\\Cat.sprite3\\bcf454acf82e4504149f7ffe07081dbc.png\n", + " Similarity Score: 0.7793\n", + "--------------------\n", + "Rank 3:\n", + " Path: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks_v1\\sprites\\Cat 2.sprite3\\7499cf6ec438d0c7af6f896bc6adc294.png\n", + " Similarity Score: 0.7506\n", + "--------------------\n", + "Rank 4:\n", + " Path: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks_v1\\sprites\\Robot.sprite3\\36d1098b880dbe47e58d93e7b2842381.png\n", + " Similarity Score: 0.7350\n", + "--------------------\n", + "Rank 5:\n", + " Path: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks_v1\\sprites\\Jouvi Dance.sprite3\\01dd2f553c7262329ebaba2516e3a2b1.png\n", + " Similarity Score: 0.7269\n", + "--------------------\n", 
+ "Search complete.\n" + ] + } + ], + "source": [ + "import os\n", + "import json\n", + "import faiss\n", + "import numpy as np\n", + "from sentence_transformers import SentenceTransformer\n", + "from PIL import Image\n", + "\n", + "# ============================== #\n", + "# DEFINE PATHS #\n", + "# ============================== #\n", + "index_path = \"D:\\\\DEV PATEL\\\\2025\\\\scratch_VLM\\\\scratch_agent\\\\blocks\\\\faiss_index.bin\"\n", + "paths_json_path = \"D:\\\\DEV PATEL\\\\2025\\\\scratch_VLM\\\\scratch_agent\\\\blocks\\\\image_paths.json\"\n", + "\n", + "# ======================================= #\n", + "# LOAD CLIP MODEL AND FAISS INDEX #\n", + "# ======================================= #\n", + "print(\"Loading CLIP model 'clip-ViT-L-14'...\")\n", + "model = SentenceTransformer('clip-ViT-L-14')\n", + "\n", + "print(\"Loading FAISS index and image paths...\")\n", + "# Load the FAISS index from the file.\n", + "index = faiss.read_index(index_path)\n", + "\n", + "# Load the corresponding image paths.\n", + "with open(paths_json_path, \"r\") as f:\n", + " image_paths = json.load(f)\n", + "\n", + "# =================================== #\n", + "# DEFINE QUERY (TEXT OR IMAGE) #\n", + "# =================================== #\n", + "# You can change this to a text string or a path to an image file.\n", + "# For example:\n", + "# query = \"A black and white cat\"\n", + "# query = \"D:\\\\DEV PATEL\\\\2025\\\\scratch_VLM\\\\scratch_agent\\\\blocks\\\\test_samp\\\\Screenshot 2025-09-15 125111.png\"\n", + "query = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\test_samp\\figure-2-6.jpg\"\n", + "\n", + "# =================================== #\n", + "# ENCODE QUERY TO EMBEDDING #\n", + "# =================================== #\n", + "print(f\"Encoding query: '{query}'...\")\n", + "\n", + "# Check if the query is a file path and, if so, load the image.\n", + "if os.path.exists(query) and os.path.isfile(query):\n", + " # This block handles image file queries.\n", + " try:\n", + " # Load the image using PIL (Pillow) and encode it.\n", + " pil_image = Image.open(query)\n", + " # Sentence-Transformers' model.encode can directly handle a PIL Image object.\n", + " query_embedding = model.encode(pil_image, convert_to_tensor=True).cpu().numpy().reshape(1, -1)\n", + " except Exception as e:\n", + " print(f\"Error loading image from path: {e}\")\n", + " query_embedding = None\n", + "else:\n", + " # This block handles text queries.\n", + " query_embedding = model.encode(query, convert_to_tensor=True).cpu().numpy().reshape(1, -1)\n", + "\n", + "# Check if the embedding was successfully created.\n", + "if query_embedding is None:\n", + " print(\"Could not create an embedding for the query. 
Exiting.\")\n", + " exit()\n", + "\n", + "# Normalize the query vector before searching\n", + "faiss.normalize_L2(query_embedding)\n", + "query_embedding = query_embedding.astype(np.float32)\n", + "\n", + "# ============================== #\n", + "# PERFORM FAISS SEARCH #\n", + "# ============================== #\n", + "k = 5 # Number of similar results to retrieve\n", + "print(f\"Searching for the top {k} similar images...\")\n", + "\n", + "# Use the FAISS index to search for the k nearest neighbors.\n", + "distances, indices = index.search(query_embedding, k)\n", + "\n", + "# The 'distances' array now contains the similarity scores, and 'indices' contains\n", + "# the indices of the found vectors in the original dataset.\n", + "\n", + "# ============================== #\n", + "# DISPLAY RESULTS #\n", + "# ============================== #\n", + "print(\"\\n--- Search Results ---\")\n", + "for i, idx in enumerate(indices[0]):\n", + " # The 'idx' corresponds to the index in our original image_paths list.\n", + " path = image_paths[idx]\n", + " # The FAISS IP index returns similarity scores, not distances.\n", + " similarity_score = distances[0][i]\n", + " print(f\"Rank {i+1}:\")\n", + " print(f\" Path: {path}\")\n", + " print(f\" Similarity Score: {similarity_score:.4f}\")\n", + " print(\"-\" * 20)\n", + "\n", + "print(\"Search complete.\")" + ] + }, + { + "cell_type": "markdown", + "id": "ac2a25da", + "metadata": {}, + "source": [ + "## dinov2small modifications" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e9533f8f", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loading DINOv2 model 'facebook/dinov2-small'...\n", + "DINOv2 model loaded successfully.\n", + "Found 54 images. 
Generating embeddings...\n", + "Embeddings generated.\n", + "DINOv2 embeddings saved to D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\dinov2_embeddings.json\n", + "Embedding creation and storage process complete.\n" + ] + } + ], + "source": [ + "import os\n", + "import json\n", + "import numpy as np\n", + "import torch\n", + "from PIL import Image\n", + "from transformers import AutoImageProcessor, AutoModel\n", + "\n", + "# ============================== #\n", + "# DEFINE PATHS #\n", + "# ============================== #\n", + "backdrop_images_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\Backdrops\"\n", + "sprite_images_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\"\n", + "code_blocks_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\code_blocks\"\n", + "output_dir = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\"\n", + "\n", + "image_dirs = [backdrop_images_path, sprite_images_path, code_blocks_path]\n", + "\n", + "# ======================================= #\n", + "# INITIALIZE DINOv2 EMBEDDER #\n", + "# ======================================= #\n", + "print(\"Loading DINOv2 model 'facebook/dinov2-small'...\")\n", + "# --- Config (tune threads as needed) ---\n", + "DINOV2_MODEL = \"facebook/dinov2-small\"\n", + "DEVICE = torch.device(\"cpu\")\n", + "torch.set_num_threads(4) # tune for your CPU\n", + "\n", + "# --- Globals for single-shot model load ---\n", + "dinov2_processor = AutoImageProcessor.from_pretrained(DINOV2_MODEL)\n", + "dinov2_model = AutoModel.from_pretrained(DINOV2_MODEL)\n", + "dinov2_model.to(DEVICE)\n", + "print(\"DINOv2 model loaded successfully.\")\n", + "\n", + "def get_dinov2_embedding(image_path):\n", + " \"\"\"\n", + " Loads an image from a path, composites it onto a white background,\n", + " processes it, and returns the DINOv2 embedding.\n", + " \"\"\"\n", + " try:\n", + " # Open the original image\n", + " original_img = Image.open(image_path)\n", + "\n", + " # Create a new image with a white background in RGB mode\n", + " final_img = Image.new(\"RGB\", original_img.size, (255, 255, 255))\n", + "\n", + " # Paste the original image onto the white background.\n", + " # If the original has an alpha channel, use it as a mask.\n", + " if original_img.mode == 'RGBA':\n", + " final_img.paste(original_img, mask=original_img.split()[-1])\n", + " else:\n", + " final_img.paste(original_img)\n", + " \n", + " # Process the final image with DINOv2\n", + " inputs = dinov2_processor(images=final_img, return_tensors=\"pt\").to(DEVICE)\n", + " with torch.no_grad():\n", + " outputs = dinov2_model(**inputs)\n", + " \n", + " # We use the CLS token embedding from the last layer.\n", + " embedding = outputs.last_hidden_state[:, 0, :].squeeze(0).cpu().numpy()\n", + " \n", + " return embedding\n", + " except Exception as e:\n", + " print(f\"Error processing image {image_path}: {e}\")\n", + " return None\n", + "\n", + "# ========================================= #\n", + "# Walk folders to collect all image paths #\n", + "# ========================================= #\n", + "folder_image_paths = []\n", + "for image_dir in image_dirs:\n", + " for root, _, files in os.walk(image_dir):\n", + " for fname in files:\n", + " if fname.lower().endswith((\".png\", \".jpg\", \".jpeg\")):\n", + " folder_image_paths.append(os.path.join(root, fname))\n", + "\n", + "if not folder_image_paths:\n", + " print(\"No images found in the specified directories. 
Exiting.\")\n", + " exit()\n", + "\n", + "print(f\"Found {len(folder_image_paths)} images. Generating embeddings...\")\n", + "\n", + "# ================================== #\n", + "# EMBED AND STORE IN A DICT #\n", + "# ================================== #\n", + "embeddings_dict = {}\n", + "for path in folder_image_paths:\n", + " embedding = get_dinov2_embedding(path)\n", + " if embedding is not None:\n", + " # Normalize the vector for cosine similarity\n", + " norm_embedding = embedding / np.linalg.norm(embedding)\n", + " # Convert numpy array to list for JSON serialization\n", + " embeddings_dict[path] = norm_embedding.tolist()\n", + " \n", + "print(\"Embeddings generated.\")\n", + "\n", + "# ============================== #\n", + "# SAVE EMBEDDINGS TO JSON #\n", + "# ============================== #\n", + "embeddings_json_path = os.path.join(output_dir, \"dinov2_embeddings.json\")\n", + "with open(embeddings_json_path, \"w\") as f:\n", + " json.dump(embeddings_dict, f, indent=2)\n", + "\n", + "print(f\"DINOv2 embeddings saved to {embeddings_json_path}\")\n", + "print(\"Embedding creation and storage process complete.\")" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "736be8cc", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "--- Starting Search Process ---\n", + "Loading DINOv2 embeddings and image paths from JSON...\n", + "Encoding query image: 'D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\test_samp\\image (3).png'...\n", + "Searching for the top 5 similar images...\n", + "\n", + "--- Search Results ---\n", + "Rank 1:\n", + " Path: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\Backdrops\\Blue Sky.sb3\\e7c147730f19d284bcd7b3f00af19bb6.png\n", + " Similarity Score: 0.3684\n", + "--------------------\n", + "Rank 2:\n", + " Path: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\code_blocks\\static_white.png\n", + " Similarity Score: 0.2349\n", + "--------------------\n", + "Rank 3:\n", + " Path: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\code_blocks\\script4.JPG\n", + " Similarity Score: 0.1705\n", + "--------------------\n", + "Rank 4:\n", + " Path: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\Backdrops\\Jungle.sb3\\f4f908da19e2753f3ed679d7b37650ca.png\n", + " Similarity Score: 0.1499\n", + "--------------------\n", + "Rank 5:\n", + " Path: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\Backdrops\\Colorful City.sb3\\04d18ddd1b85f0ea30beb14b8da49f60.png\n", + " Similarity Score: 0.1458\n", + "--------------------\n", + "Search complete.\n" + ] + } + ], + "source": [ + "# ================================================================= #\n", + "# PART 2: SIMILARITY SEARCH FROM THE JSON FILE #\n", + "# ================================================================= #\n", + "\n", + "def cosine_similarity(vec1, vec2):\n", + " \"\"\"\n", + " Calculates the cosine similarity between two numpy vectors.\n", + " Assumes vectors are already normalized.\n", + " \"\"\"\n", + " return np.dot(vec1, vec2)\n", + "\n", + "# ============================== #\n", + "# LOAD EMBEDDINGS FROM JSON #\n", + "# ============================== #\n", + "print(\"\\n--- Starting Search Process ---\")\n", + "print(\"Loading DINOv2 embeddings and image paths from JSON...\")\n", + "with open(embeddings_json_path, \"r\") as f:\n", + " embeddings_dict = json.load(f)\n", + "\n", + "# Convert list to numpy array for calculation\n", + "image_paths = 
list(embeddings_dict.keys())\n", + "image_embeddings = np.array(list(embeddings_dict.values()))\n", + "\n", + "# =================================== #\n", + "# DEFINE QUERY (IMAGE) #\n", + "# =================================== #\n", + "query = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\test_samp\\image (3).png\"\n", + "print(f\"Encoding query image: '{query}'...\")\n", + "\n", + "# =================================== #\n", + "# ENCODE QUERY TO EMBEDDING #\n", + "# =================================== #\n", + "query_embedding = get_dinov2_embedding(query)\n", + "if query_embedding is None:\n", + " print(\"Could not create an embedding for the query. Exiting.\")\n", + " exit()\n", + "\n", + "# Normalize the query embedding for cosine similarity\n", + "query_embedding = query_embedding / np.linalg.norm(query_embedding)\n", + "\n", + "# ============================== #\n", + "# PERFORM MANUAL SEARCH #\n", + "# ============================== #\n", + "k = 5 # Number of similar results to retrieve\n", + "print(f\"Searching for the top {k} similar images...\")\n", + "\n", + "similarities = []\n", + "for i, stored_embedding in enumerate(image_embeddings):\n", + " similarity = cosine_similarity(query_embedding, stored_embedding)\n", + " similarities.append((similarity, image_paths[i]))\n", + "\n", + "# Sort by similarity in descending order\n", + "similarities.sort(key=lambda x: x[0], reverse=True)\n", + "\n", + "# ============================== #\n", + "# DISPLAY RESULTS #\n", + "# ============================== #\n", + "print(\"\\n--- Search Results ---\")\n", + "for i in range(min(k, len(similarities))):\n", + " similarity, path = similarities[i]\n", + " print(f\"Rank {i+1}:\")\n", + " print(f\" Path: {path}\")\n", + " # The FAISS 'distance' for IndexFlatIP is -1 * dot product, so we invert the sign.\n", + " # Here we display the direct cosine similarity score [0, 1]\n", + " print(f\" Similarity Score: {similarity:.4f}\")\n", + " print(\"-\" * 20)\n", + "\n", + "print(\"Search complete.\")" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "6cc5b3a7", + "metadata": {}, + "outputs": [], + "source": [ + "import torch\n", + "from transformers import AutoImageProcessor, AutoModel\n", + "import numpy as np\n", + "from PIL import Image\n", + "from pathlib import Path\n", + "from io import BytesIO\n", + "import json\n", + "\n", + "# --- Config (tune threads as needed) ---\n", + "DINOV2_MODEL = \"facebook/dinov2-small\" # small = best CPU latency/quality tradeoff\n", + "DEVICE = torch.device(\"cpu\")\n", + "torch.set_num_threads(4) # tune for your CPU\n", + "\n", + "# --- Globals for single-shot model load ---\n", + "_dinov2_processor = None\n", + "_dinov2_model = None\n", + "\n", + "\n", + "def init_dinov2(model_name: str = DINOV2_MODEL, device: torch.device = DEVICE):\n", + " \"\"\"\n", + " Lazy-initialize DINOv2 processor & model (call once before embedding).\n", + " \"\"\"\n", + " global _dinov2_processor, _dinov2_model\n", + " if _dinov2_processor is None or _dinov2_model is None:\n", + " _dinov2_processor = AutoImageProcessor.from_pretrained(model_name)\n", + " _dinov2_model = AutoModel.from_pretrained(model_name)\n", + " _dinov2_model.eval().to(device)\n", + "\n", + "\n", + "def embed_bytesio_list(bytesio_list, batch_size: int = 8):\n", + " \"\"\"\n", + " Accepts a list of BytesIO objects (each contains an image).\n", + " Returns: np.ndarray shape (N, D) of L2-normalized embeddings (dtype float32).\n", + " \"\"\"\n", + " if _dinov2_processor is None or 
_dinov2_model is None:\n", + " init_dinov2()\n", + "\n", + " imgs = []\n", + " for b in bytesio_list:\n", + " with Image.open(b) as original_img:\n", + " # Create a new image with a white background in RGB mode\n", + " final_img = Image.new(\"RGB\", original_img.size, (255, 255, 255))\n", + " # Paste the original image onto the white background, using the alpha channel as a mask if it exists\n", + " if original_img.mode == 'RGBA':\n", + " final_img.paste(original_img, mask=original_img.split()[-1])\n", + " else:\n", + " final_img.paste(original_img)\n", + " imgs.append(final_img.copy())\n", + "\n", + " embs = []\n", + " for i in range(0, len(imgs), batch_size):\n", + " batch = imgs[i: i + batch_size]\n", + " inputs = _dinov2_processor(images=batch, return_tensors=\"pt\")\n", + " inputs = {k: v.to(DEVICE) for k, v in inputs.items()}\n", + " with torch.no_grad():\n", + " out = _dinov2_model(**inputs)\n", + " cls = out.last_hidden_state[:, 0, :] # (B, D)\n", + " cls = torch.nn.functional.normalize(cls, p=2, dim=1)\n", + " embs.append(cls.cpu().numpy())\n", + "\n", + " if not embs:\n", + " return np.zeros((0, _dinov2_model.config.hidden_size), dtype=np.float32)\n", + "\n", + " return np.vstack(embs).astype(np.float32)\n", + "\n", + "\n", + "def l2_normalize_rows(a: np.ndarray, eps: float = 1e-12) -> np.ndarray:\n", + " \"\"\"\n", + " Row-wise L2 normalization for numpy arrays.\n", + " \"\"\"\n", + " norm = np.linalg.norm(a, axis=1, keepdims=True)\n", + " return a / (norm + eps)\n", + "\n", + "\n", + "def regenerate_reference_embeddings(folder_image_paths, out_path):\n", + " init_dinov2()\n", + "\n", + " bytes_list = []\n", + " valid_paths = []\n", + "\n", + " for p in folder_image_paths:\n", + " p = Path(p) # convert str to Path\n", + " if not p.exists():\n", + " print(f\"Missing file: {p}\")\n", + " continue\n", + " with open(p, \"rb\") as f:\n", + " b = BytesIO(f.read())\n", + " b.seek(0)\n", + " bytes_list.append(b)\n", + " valid_paths.append(p)\n", + "\n", + " embs = embed_bytesio_list(bytes_list, batch_size=8) # (M, D)\n", + "\n", + " out_json = []\n", + " for i, p in enumerate(valid_paths):\n", + " out_json.append({\n", + " \"path\": str(p), # ensure string in JSON\n", + " \"embeddings\": embs[i].tolist()\n", + " })\n", + "\n", + " with open(out_path, \"w\") as f:\n", + " json.dump(out_json, f, indent=2)\n", + "\n", + " print(f\"Embeddings for {len(valid_paths)} images saved to {out_path}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "id": "a2f884ed", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Found 54 images. 
Generating embeddings...\n", + "Embeddings for 54 images saved to D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\dinov2_embeddings.json\n", + "Embedding generation and saving complete.\n" + ] + } + ], + "source": [ + "if __name__ == \"__main__\":\n", + " from pathlib import Path\n", + " import os\n", + "\n", + " # Define paths\n", + " base_dir = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\"\n", + " backdrop_images_path = Path(base_dir) / \"Backdrops\"\n", + " sprite_images_path = Path(base_dir) / \"sprites\"\n", + " code_blocks_path = Path(base_dir) / \"code_blocks\"\n", + " output_dir = Path(base_dir)\n", + " output_json_path = output_dir / \"dinov2_embeddings.json\"\n", + "\n", + " # Walk folders to collect all image paths\n", + " folder_image_paths = []\n", + " image_dirs = [backdrop_images_path, sprite_images_path, code_blocks_path]\n", + " for image_dir in image_dirs:\n", + " if image_dir.exists():\n", + " for root, _, files in os.walk(image_dir):\n", + " for fname in files:\n", + " if fname.lower().endswith((\".png\", \".jpg\", \".jpeg\")):\n", + " folder_image_paths.append(os.path.join(root, fname))\n", + "\n", + " if not folder_image_paths:\n", + " print(\"No images found in the specified directories. Exiting.\")\n", + " else:\n", + " print(f\"Found {len(folder_image_paths)} images. Generating embeddings...\")\n", + " regenerate_reference_embeddings(folder_image_paths, output_json_path)\n", + " print(\"Embedding generation and saving complete.\")" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "53ab2b8f", + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "# Prepare manipulated sprite JSON structure\n", + "manipulated_json = {}\n", + "img_elements = []\n", + "# { changes: \"pdf_stream\" in place of \"pdf_path\"\n", + "def extract_images_from_pdf(pdf_stream: io.BytesIO):\n", + " ''' Extract images from PDF and generate structured sprite JSON '''\n", + " try:\n", + " # {\n", + " # pdf_path = Path(pdf_path)\n", + " # pdf_filename = pdf_path.stem # e.g., \"scratch_crab\"\n", + " # pdf_dir_path = str(pdf_path.parent).replace(\"/\", \"\\\\\")\n", + " # print(\"-------------------------------pdf_filename-------------------------------\",pdf_filename)\n", + " # print(\"-------------------------------pdf_dir_path-------------------------------\",pdf_dir_path)\n", + "\n", + " if isinstance(pdf_stream, io.BytesIO):\n", + " # use a random ID since there's no filename\n", + " pdf_id = uuid.uuid4().hex \n", + " else:\n", + " pdf_id = os.path.splitext(os.path.basename(pdf_stream))[0]\n", + " \n", + " # extracted_image_subdir = DETECTED_IMAGE_DIR / pdf_filename\n", + " # json_subdir = JSON_DIR / pdf_filename\n", + " # extracted_image_subdir.mkdir(parents=True, exist_ok=True)\n", + " # json_subdir.mkdir(parents=True, exist_ok=True)\n", + " # print(\"-------------------------------extracted_image_subdir-------------------------------\",extracted_image_subdir)\n", + " # print(\"-------------------------------json_subdir-------------------------------\",json_subdir)\n", + " # # Output paths (now using Path objects directly)\n", + " # output_json_path = json_subdir / \"extracted.json\"\n", + " # final_json_path = json_subdir / \"extracted_sprites.json\" # Path to extracted_sprites.json\n", + " # final_json_path_2 = json_subdir / \"extracted_sprites_2.json\"\n", + " # print(\"-------------------------------output_json_path-------------------------------\",output_json_path)\n", + " # 
print(\"-------------------------------final_json_path-------------------------------\",final_json_path)\n", + " # print(\"-------------------------------final_json_path_2-------------------------------\",final_json_path_2)\n", + " \n", + " # }\n", + " try:\n", + " ocr_lang = \"eng\"\n", + " elements = partition_pdf(\n", + " # filename=str(pdf_path), # partition_pdf might expect a string\n", + " file=pdf_stream, # 'file=', inplace of 'filename'\n", + " strategy=\"hi_res\",\n", + " extract_image_block_types=[\"Image\"],\n", + " hi_res_model_name=\"yolox\",\n", + " extract_image_block_to_payload=True,\n", + " ocr_languages=ocr_lang,\n", + " )\n", + " print(f\"ELEMENTS\")\n", + " except Exception as e:\n", + " raise RuntimeError(\n", + " f\"❌ Failed to extract images from PDF: {str(e)}\")\n", + "\n", + " file_elements = [element.to_dict() for element in elements]\n", + "\n", + " #{\n", + " # try:\n", + " # with open(output_json_path, \"w\") as f:\n", + " # json.dump([element.to_dict()\n", + " # for element in elements], f, indent=4)\n", + " # except Exception as e:\n", + " # raise RuntimeError(f\"❌ Failed to write extracted.json: {str(e)}\")\n", + "\n", + " # try:\n", + " # # Display extracted images\n", + " # with open(output_json_path, 'r') as file:\n", + " # file_elements = json.load(file)\n", + " # except Exception as e:\n", + " # raise RuntimeError(f\"❌ Failed to read extracted.json: {str(e)}\") \n", + " # }\n", + "\n", + " sprite_count = 1\n", + " for el in file_elements:\n", + " img_b64 = el[\"metadata\"].get(\"image_base64\")\n", + " if not img_b64:\n", + " continue \n", + " \n", + " manipulated_json[f\"Sprite {sprite_count}\"] = {\n", + " # \"id\":auto_id,\n", + " # \"name\": name,\n", + " \"base64\": el[\"metadata\"][\"image_base64\"],\n", + " \"file-path\": pdf_id,\n", + " # \"description\": description\n", + " }\n", + " sprite_count += 1\n", + " return manipulated_json\n", + " except Exception as e:\n", + " raise RuntimeError(f\"❌ Error in extract_images_from_pdf: {str(e)}\")\n", + "\n", + "# def similarity_matching(input_json_path: str, project_folder: str) -> str:\n", + "def similarity_matching(sprites_data: str, project_folder: str) -> str:\n", + " logger.info(\"🔍 Running similarity matching…\")\n", + " os.makedirs(project_folder, exist_ok=True)\n", + "\n", + " # ----------------------------------------\n", + " # CHANGED: define normalized base-paths so startswith() checks work\n", + " backdrop_base_path = os.path.normpath(str(BACKDROP_DIR))\n", + " sprite_base_path = os.path.normpath(str(SPRITE_DIR))\n", + " code_blocks_path = os.path.normpath(str(CODE_BLOCKS_DIR))\n", + " # ----------------------------------------\n", + "\n", + " project_json_path = os.path.join(project_folder, \"project.json\")\n", + "\n", + " # ==============================\n", + " # READ SPRITE METADATA\n", + " # ==============================\n", + " # with open(input_json_path, 'r') as f:\n", + " # sprites_data = json.load(f)\n", + "\n", + " sprite_ids, sprite_base64 = [], []\n", + " for sid, sprite in sprites_data.items():\n", + " sprite_ids.append(sid)\n", + " # texts.append(\"This is \" + sprite.get(\"description\", sprite.get(\"name\", \"\")))\n", + " sprite_base64.append(sprite[\"base64\"])\n", + "\n", + " sprite_images_bytes = []\n", + " for b64 in sprite_base64:\n", + " img = Image.open(BytesIO(base64.b64decode(b64.split(\",\")[-1]))).convert(\"RGB\")\n", + " buffer = BytesIO()\n", + " img.save(buffer, format=\"PNG\")\n", + " buffer.seek(0)\n", + " sprite_images_bytes.append(buffer)\n", + " \n", 
+ " # =========================================\n", + " # Build the list of all candidate images\n", + " # =========================================\n", + " folder_image_paths = [SPRITE_DIR / \"Abby.sprite3\" / \"34a175600dc009a521eb46fdbbbeeb67.png\"\n", + ",SPRITE_DIR / \"Abby.sprite3\" / \"45de34b47a2ce22f6f5d28bb35a44ff5.png\"\n", + ",SPRITE_DIR / \"Abby.sprite3\" / \"809d9b47347a6af2860e7a3a35bce057.png\"\n", + ",SPRITE_DIR / \"Abby.sprite3\" / \"920f14335615fff9b8c55fccb8971984.png\"\n", + ",SPRITE_DIR / \"Amon.sprite3\" / \"60f720956ab1840431dcf0616ce98f14.png\"\n", + ",SPRITE_DIR / \"Andie.sprite3\" / \"b36584db82bdd45014430aa918461ca0.png\"\n", + ",SPRITE_DIR / \"Andie.sprite3\" / \"b3fc774e753fef520fb544127a48554b.png\"\n", + ",SPRITE_DIR / \"Andie.sprite3\" / \"d92aaf6cf44921905d51ca4a10a4f3d6.png\"\n", + ",SPRITE_DIR / \"Andie.sprite3\" / \"ded71c8a0f39852178f1695b622c2d89.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"105f4f3d260dcb8bea02ea9ee5d18cf4.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"2d208a34e74fdce9dab9d4c585dcfa2b.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"3948aad16f8169c013c956dd152a09a6.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"4931a363e3e4efa20230f6ff2991c6b4.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"62c50c90535b64f2ae130a5c680ddcb4.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"7bb9c790b02231e1272701167c26b17a.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"84c5e22b4303c7c1fb707125706c9aaa.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"b7693bd6250d4411ee622b67f8025924.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"ca27e001a263ee6b5852508f39d021db.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"d86bb27b4f8d7b70c39c96f29c6943b4.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"db6c03113f71b91f22a9f3351f90e5bf.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"e3698b76cb0864df2fbaba80e6bd8067.png\"\n", + ",SPRITE_DIR / \"Anina Dance.sprite3\" / \"ed90e8b7a05c1552194af597ac0637cd.png\"\n", + ",SPRITE_DIR / \"Apple.sprite3\" / \"3826a4091a33e4d26f87a2fac7cf796b.png\"\n", + ",SPRITE_DIR / \"Arrow1.sprite3\" / \"65b8e977641885010a10a46512fb95b4.png\"\n", + ",SPRITE_DIR / \"Arrow1.sprite3\" / \"70ffa0bae8693418459f21f370584f6d.png\"\n", + ",SPRITE_DIR / \"Arrow1.sprite3\" / \"be8fcd10da0b082f8d4775088ef7bd52.png\"\n", + ",SPRITE_DIR / \"Arrow1.sprite3\" / \"dafcdfda65af14e172809984710f31a9.png\"\n", + ",SPRITE_DIR / \"Avery Walking.sprite3\" / \"3a935fe75ac999e22b93d06b3081a271.png\"\n", + ",SPRITE_DIR / \"Avery Walking.sprite3\" / \"448e54fb14b13d492885fc247e76b7f4.png\"\n", + ",SPRITE_DIR / \"Avery Walking.sprite3\" / \"8f439476a738251043d488d7a4bc6870.png\"\n", + ",SPRITE_DIR / \"Avery Walking.sprite3\" / \"dc6a584704c09a3fbafb9825635a9fd4.png\"\n", + ",SPRITE_DIR / \"Avery.sprite3\" / \"944385ea927e8f9d72b9e19620487999.png\"\n", + ",SPRITE_DIR / \"Avery.sprite3\" / \"f52bde34d8027aab14b53f228fe5cc14.png\"\n", + ",SPRITE_DIR / \"Ball.sprite3\" / \"1c44b7494dec047371f74c705f1d99fc.png\"\n", + ",SPRITE_DIR / \"Ball.sprite3\" / \"3c6241985b581284ec191f9d1deffde8.png\"\n", + ",SPRITE_DIR / \"Ball.sprite3\" / \"ad7dc51cafd73e8279073e33b0eab335.png\"\n", + ",SPRITE_DIR / \"Ball.sprite3\" / \"db144b2a19f4f1ab31e30d58f00447dc.png\"\n", + ",SPRITE_DIR / \"Ball.sprite3\" / \"f221a2edf87aff3615c0c003e616b31b.png\"\n", + ",SPRITE_DIR / \"Ballerina.sprite3\" / \"4ccb1752a43f48aafe490c9c08e58c27.png\"\n", + ",SPRITE_DIR / \"Ballerina.sprite3\" / 
\"5197d3778baf55da6b81b3ada1e10021.png\"\n", + ",SPRITE_DIR / \"Ballerina.sprite3\" / \"5aae21aee33c3f1ae943af5ea11254bf.png\"\n", + ",SPRITE_DIR / \"Ballerina.sprite3\" / \"fc02bf591dd3d91eeeb50c7424d08274.png\"\n", + ",SPRITE_DIR / \"Balloon1.sprite3\" / \"63e5aea255610f9fdf0735e1e9a55a5c.png\"\n", + ",SPRITE_DIR / \"Balloon1.sprite3\" / \"a2516ac2b8d7a348194908e630387ea9.png\"\n", + ",SPRITE_DIR / \"Balloon1.sprite3\" / \"d7974f9e15000c16222f94ee32d8227a.png\"\n", + ",SPRITE_DIR / \"Bananas.sprite3\" / \"e5d3d3eb61797f5999732a8f5efead24.png\"\n", + ",SPRITE_DIR / \"Baseball.sprite3\" / \"74e08fc57820f925c7689e7b754c5848.png\"\n", + ",SPRITE_DIR / \"Basketball.sprite3\" / \"6b0b2aaa12d655e96b5b34e92d9fbd4f.png\"\n", + ",SPRITE_DIR / \"Bat.sprite3\" / \"4e4ced87ed37ee66c758bba077e0eae6.png\"\n", + ",SPRITE_DIR / \"Bat.sprite3\" / \"60f5bfce5d9b11bfcd199a6aa5454b3f.png\"\n", + ",SPRITE_DIR / \"Bat.sprite3\" / \"698c2a48e774f9959d57c9618b156c20.png\"\n", + ",SPRITE_DIR / \"Bat.sprite3\" / \"bc6dd12fc9e407c7774959cdf427f8b5.png\"\n", + ",SPRITE_DIR / \"Batter.sprite3\" / \"592ee9ab2aeefe65cb4fb95fcd046f33.png\"\n", + ",SPRITE_DIR / \"Batter.sprite3\" / \"9d193bef6e3d6d8eba6d1470b8bf9351.png\"\n", + ",SPRITE_DIR / \"Batter.sprite3\" / \"baseball_sprite_motion_1.png\"\n", + ",SPRITE_DIR / \"Batter.sprite3\" / \"bd4fc003528acfa847e45ff82f346eee.png\"\n", + ",SPRITE_DIR / \"Batter.sprite3\" / \"fdfde4bcbaca0f68e83fdf3f4ef0c660.png\"\n", + ",SPRITE_DIR / \"Beachball.sprite3\" / \"5198b5a03ebae60698e0906f59a5fc15.png\"\n", + ",SPRITE_DIR / \"Bear-walking.sprite3\" / \"0a38a860f2e573b8dc5b09f390d30fbd.png\"\n", + ",SPRITE_DIR / \"Bear-walking.sprite3\" / \"36d06aa23c684fc996952adb0e76e6b4.png\"\n", + ",SPRITE_DIR / \"Bear-walking.sprite3\" / \"6d4d06e3f4cd0c9455b777b9a40782b6.png\"\n", + ",SPRITE_DIR / \"Bear-walking.sprite3\" / \"6d50c5fe63ab5f77d10144a68ca535a6.png\"\n", + ",SPRITE_DIR / \"Bear-walking.sprite3\" / \"7453709bef16e33e6f989aee14d7fc07.png\"\n", + ",SPRITE_DIR / \"Bear-walking.sprite3\" / \"d2a5f124f988def1d214e6d0813a48f3.png\"\n", + ",SPRITE_DIR / \"Bear-walking.sprite3\" / \"e531b307381c2aa148be4ccc36db0333.png\"\n", + ",SPRITE_DIR / \"Bear-walking.sprite3\" / \"f36c80d2e731be95df7ec6d07f89fa00.png\"\n", + ",SPRITE_DIR / \"Bear.sprite3\" / \"6f303e972f33fcb7ef36d0d8012d0975.png\"\n", + ",SPRITE_DIR / \"Bear.sprite3\" / \"bear_motion_2.png\"\n", + ",SPRITE_DIR / \"Bear.sprite3\" / \"deef1eaa96d550ae6fc11524a1935024.png\"\n", + ",SPRITE_DIR / \"Beetle.sprite3\" / \"46d0dfd4ae7e9bfe3a6a2e35a4905eae.png\"\n", + ",SPRITE_DIR / \"Bell.sprite3\" / \"8c0234fe1bfd36f5a72e975fbbc18bfd.png\"\n", + ",SPRITE_DIR / \"Ben.sprite3\" / \"165d993c30dfdb9e829d0d98867d7826.png\"\n", + ",SPRITE_DIR / \"Ben.sprite3\" / \"2cd77b8a9961e7ad4da905e7731b7c1b.png\"\n", + ",SPRITE_DIR / \"Ben.sprite3\" / \"9f9f88aea3457084d8d734040b0b9067.png\"\n", + ",SPRITE_DIR / \"Ben.sprite3\" / \"acc208e29f0422c2bcffa3b8873abc63.png\"\n", + ",SPRITE_DIR / \"Block-A.sprite3\" / \"ef3b01f6fc1ffa1270fbbf057f7ded42.png\"\n", + ",SPRITE_DIR / \"Block-B.sprite3\" / \"1dc05fbaa37a6b41ffff459d0a776989.png\"\n", + ",SPRITE_DIR / \"Block-C.sprite3\" / \"43090c4b423c977041542ce12017fda0.png\"\n", + ",SPRITE_DIR / \"Block-D.sprite3\" / \"1fb3db31500d6f7da662e825157920fa.png\"\n", + ",SPRITE_DIR / \"Block-E.sprite3\" / \"240aacc04444cef3b2ef8cfaf0dae479.png\"\n", + ",SPRITE_DIR / \"Block-F.sprite3\" / \"d88d750ce848d7dbeeca3f02249350e2.png\"\n", + ",SPRITE_DIR / \"Block-G.sprite3\" / \"989c76ae7f8c2e42ebeacdda961061ca.png\"\n", + 
",SPRITE_DIR / \"Block-H.sprite3\" / \"93426b2f313d1bdedff368d94fc989d6.png\"\n", + ",SPRITE_DIR / \"Block-I.sprite3\" / \"f911b18605f59c75adf4d83e07811fd8.png\"\n", + ",SPRITE_DIR / \"Block-J.sprite3\" / \"8580c990ac918577550165447f870542.png\"\n", + ",SPRITE_DIR / \"Block-K.sprite3\" / \"d93a9fd4bfb5bc1e9790945fa756b748.png\"\n", + ",SPRITE_DIR / \"Block-L.sprite3\" / \"579c90cbaf847e9adf4faf37f340b32d.png\"\n", + ",SPRITE_DIR / \"Block-M.sprite3\" / \"6c5cf1fd0673f441b04e15e799685831.png\"\n", + ",SPRITE_DIR / \"Block-N.sprite3\" / \"9eba5dd44d65e1d421c40686fecde906.png\"\n", + ",SPRITE_DIR / \"Block-O.sprite3\" / \"8bbbde09c13a06015e554ab36fa178c0.png\"\n", + ",SPRITE_DIR / \"Block-P.sprite3\" / \"0f920b99ac49421cf28e55c8d863bdc5.png\"\n", + ",SPRITE_DIR / \"Block-Q.sprite3\" / \"67f8e80eabaec4883eb9c67c9527004a.png\"\n", + ",SPRITE_DIR / \"Block-R.sprite3\" / \"9d0432c5575451e251990d89845f8d00.png\"\n", + ",SPRITE_DIR / \"Block-S.sprite3\" / \"83c7486b08e78d099b4e776aaa2783fe.png\"\n", + ",SPRITE_DIR / \"Block-T.sprite3\" / \"6c1b26611ec0483f601a648f59305aff.png\"\n", + ",SPRITE_DIR / \"Block-U.sprite3\" / \"d02f77994789f528f0aaa7f211690151.png\"\n", + ",SPRITE_DIR / \"Block-V.sprite3\" / \"0654cfcb6234406837336e90be7e419c.png\"\n", + ",SPRITE_DIR / \"Block-W.sprite3\" / \"2b3145ae89c32793c4fcea9a6bcc6075.png\"\n", + ",SPRITE_DIR / \"Block-X.sprite3\" / \"a73f354dc045bbbc5a491d9367192a80.png\"\n", + ",SPRITE_DIR / \"Block-Y.sprite3\" / \"e13e79f106d32a3176dbcf5c1b35827d.png\"\n", + ",SPRITE_DIR / \"Block-Z.sprite3\" / \"c57d371b291d43675f46601518098572.png\"\n", + ",SPRITE_DIR / \"Bowl.sprite3\" / \"d147f16e3e2583719c073ac5b55fe3ca.png\"\n", + ",SPRITE_DIR / \"Bowtie.sprite3\" / \"4b032ba44b8077439e73815542e7ed23.png\"\n", + ",SPRITE_DIR / \"Bread.sprite3\" / \"585de1550446d4420f8a10fdecac995b.png\"\n", + ",SPRITE_DIR / \"Broom.sprite3\" / \"556288a1c996345c751a3dc88b570cfa.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"148034b1557cc3dae39953e43ab50ff0.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"4212ff1769c169bfa0db043b18fdade8.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"80b120b7152ed72fded84fef485f4f79.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"8f64966be60d332b345598819c67a8b6.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"a8c977a3b85ffe8c8b453c9d668989b8.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"bb47a3d5d03a34937557c558c6cb5d18.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"d1fcce0aac589a17324943a3b759fc2a.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"e4764cfc384a499f92da3ea745bcebe2.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"e8c9508b1f6a0a432e09c10ef9ada67c.png\"\n", + ",SPRITE_DIR / \"Buildings.sprite3\" / \"fcedb6b25a2db6de28b39130f978b0bf.png\"\n", + ",SPRITE_DIR / \"Butterfly 1.sprite3\" / \"34b76c1835c6a7fc2c47956e49bb0f52.png\"\n", + ",SPRITE_DIR / \"Butterfly 1.sprite3\" / \"49c9f952007d870a046cff93b6e5e098.png\"\n", + ",SPRITE_DIR / \"Butterfly 1.sprite3\" / \"fe98df7367e314d9640bfaa54fc239be.png\"\n", + ",SPRITE_DIR / \"Butterfly 2.sprite3\" / \"372ae0abd2e8e50a20bc12cb160d8746.png\"\n", + ",SPRITE_DIR / \"Butterfly 2.sprite3\" / \"e96f4c6913107c9b790d37bb65507c14.png\"\n", + ",SPRITE_DIR / \"Button1.sprite3\" / \"21fb7fa07eac4794fded0be4e18e20a2.png\"\n", + ",SPRITE_DIR / \"Button2.sprite3\" / \"329bf3d86050ceaea2b27e2c5d2baec1.png\"\n", + ",SPRITE_DIR / \"Button2.sprite3\" / \"af4cd54e776031bc9cc54ddd6892f97b.png\"\n", + ",SPRITE_DIR / \"Button3.sprite3\" / 
\"5021f6b7d166873ef0711c4d4a351912.png\"\n", + ",SPRITE_DIR / \"Button3.sprite3\" / \"a3b357ea21773bcb3545a227ee877e9a.png\"\n", + ",SPRITE_DIR / \"Button4.sprite3\" / \"71ced7c192168c7b221d16b4eaff440e.png\"\n", + ",SPRITE_DIR / \"Button4.sprite3\" / \"7d34ad26633abbc752c9cd93ace0a81f.png\"\n", + ",SPRITE_DIR / \"Button5.sprite3\" / \"94957f2f79e8970d8b2cd0f74a0c1ffc.png\"\n", + ",SPRITE_DIR / \"Button5.sprite3\" / \"a4bb9a9e06e65337798471035719985a.png\"\n", + ",SPRITE_DIR / \"Cake.sprite3\" / \"862488bf66b67c5330cae9235b853b6e.png\"\n", + ",SPRITE_DIR / \"Cake.sprite3\" / \"dfe9c5d40da0dcc386fad524c36d3579.png\"\n", + ",SPRITE_DIR / \"Calvrett.sprite3\" / \"452683db3ad7a882f5ab9de496441592.png\"\n", + ",SPRITE_DIR / \"Calvrett.sprite3\" / \"728ec1ebc275b53809023a36c66eeaa3.png\"\n", + ",SPRITE_DIR / \"Casey.sprite3\" / \"50bd5162671b8a30fcfa3082a9e79ec4.png\"\n", + ",SPRITE_DIR / \"Casey.sprite3\" / \"e09e5ef2bdeb69163a543f3216c1f54c.png\"\n", + ",SPRITE_DIR / \"Casey.sprite3\" / \"e5a47371f3e9f853b36560cda35344b6.png\"\n", + ",SPRITE_DIR / \"Casey.sprite3\" / \"ebc3de539e02801d420268eb189c5a47.png\"\n", + ",SPRITE_DIR / \"Cassy Dance.sprite3\" / \"63483bbf72fc55719918a335e1a16426.png\"\n", + ",SPRITE_DIR / \"Cassy Dance.sprite3\" / \"6cb3686db1fa658b6541cc9fa3ccfcc7.png\"\n", + ",SPRITE_DIR / \"Cassy Dance.sprite3\" / \"aca39a47cf3affd8a83d3287d2856c29.png\"\n", + ",SPRITE_DIR / \"Cassy Dance.sprite3\" / \"f801cec764da5ef6374e1d557296d14e.png\"\n", + ",SPRITE_DIR / \"Cat 2.sprite3\" / \"7499cf6ec438d0c7af6f896bc6adc294.png\"\n", + ",SPRITE_DIR / \"Cat Flying.sprite3\" / \"6667936a2793aade66c765c329379ad0.png\"\n", + ",SPRITE_DIR / \"Cat Flying.sprite3\" / \"a1ab94c8172c3b97ed9a2bf7c32172cd.png\"\n", + ",SPRITE_DIR / \"Cat.sprite3\" / \"0fb9be3e8397c983338cb71dc84d0b25.png\"\n", + ",SPRITE_DIR / \"Cat.sprite3\" / \"bcf454acf82e4504149f7ffe07081dbc.png\"\n", + ",SPRITE_DIR / \"Catcher.sprite3\" / \"895cdda4f2bd9d6f50ff07188e7ce395.png\"\n", + ",SPRITE_DIR / \"Catcher.sprite3\" / \"8aa875f077c405e2045f5ab60705e712.png\"\n", + ",SPRITE_DIR / \"Catcher.sprite3\" / \"99af13802e9bfd7b4a4bfb8ead825c0c.png\"\n", + ",SPRITE_DIR / \"Catcher.sprite3\" / \"a31e30677637ae4de975d40b6d822853.png\"\n", + ",SPRITE_DIR / \"Centaur.sprite3\" / \"2373556e776cad3ba4d6ee04fc34550b.png\"\n", + ",SPRITE_DIR / \"Centaur.sprite3\" / \"c00ffa6c5dd0baf9f456b897ff974377.png\"\n", + ",SPRITE_DIR / \"Centaur.sprite3\" / \"d722329bd9373ad80625e5be6d52f3ed.png\"\n", + ",SPRITE_DIR / \"Centaur.sprite3\" / \"d7aa990538915b7ef1f496d7e8486ade.png\"\n", + ",SPRITE_DIR / \"Champ99.sprite3\" / \"20318b14a332fd618ec91e7c1de8be9a.png\"\n", + ",SPRITE_DIR / \"Champ99.sprite3\" / \"26fdff424232926001d20041c3d5673b.png\"\n", + ",SPRITE_DIR / \"Champ99.sprite3\" / \"56f3220fa82d99dcfc7d27d433ed01e4.png\"\n", + ",SPRITE_DIR / \"Champ99.sprite3\" / \"68453506ae4b6b60a3fc6817ba39d492.png\"\n", + ",SPRITE_DIR / \"Champ99.sprite3\" / \"7b073f47fbd9421e0d60daacc157f506.png\"\n", + ",SPRITE_DIR / \"Champ99.sprite3\" / \"a28ffc2b129fb359ff22c79c48341267.png\"\n", + ",SPRITE_DIR / \"Champ99.sprite3\" / \"d6ae13605610aa008d48b0c8b25a57d3.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"03bc23a9fa12c1244c83a07a81f20bfd.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"0f18f9e90d0ed68ebec23da087eb2603.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"1044a68cc743f83564e36a6bca16830b.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"1e303bb57aac0cb4678e85de4251f3f4.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / 
\"527ba82c5e82f43c8fca0be905dbe20a.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"5e2f620e5687a36e1954414054c69ccc.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"6be261800647c53becb1f93ed31ed13e.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"6d5ddfc69f9c6a3f1d2ded1428237931.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"6f78ce6a87d114162ed9fbef30f9a0fd.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"984043e1e7c544999c31f952d1d43a56.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"b37d0e0d46f07cb2cbdc5285e176bf62.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"cc0be722cf93eef63726bd606ab11c5c.png\"\n", + ",SPRITE_DIR / \"Characters 1.sprite3\" / \"f26b130c2c58b812be21d1a9745863a1.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"1cf73a791959e07b5bafe18474f93b78.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"67d425b11544caa0fe9228f355c6485b.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"7084b3baab935de819cc5ab46f7cecf8.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"93e035270675f933b94ee951d7e475e3.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"bf0d808f7bf0c11c338b4fea0a735874.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"db3f436fcb6fb28828a4c932b60feb5e.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"df7cbf2913bcea721df2e0360644f193.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"e0eacf1e575adc559c41e3a81a892168.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"e8b44b0e904fd4bb7430c26b743f1520.png\"\n", + ",SPRITE_DIR / \"Characters 2.sprite3\" / \"f4f2778df2840de5a6449a49f3efb599.png\"\n", + ",SPRITE_DIR / \"Cheesy Puffs.sprite3\" / \"82772a61ec74974e84c686c61ea0b7d5.png\"\n", + ",SPRITE_DIR / \"Chick.sprite3\" / \"5e23c8c28ffd390df7deb2414be37781.png\"\n", + ",SPRITE_DIR / \"Chick.sprite3\" / \"77911bbe5e11ede35871e8002a26356d.png\"\n", + ",SPRITE_DIR / \"Chick.sprite3\" / \"80abbc427366bca477ccf1ef0faf240a.png\"\n", + ",SPRITE_DIR / \"City Bus.sprite3\" / \"7d7e26014a346b894db8ab1819f2167f.png\"\n", + ",SPRITE_DIR / \"City Bus.sprite3\" / \"e9694adbff9422363e2ea03166015393.png\"\n", + ",SPRITE_DIR / \"Cloud.sprite3\" / \"c9630e30e59e4565e785a26f58568904.png\"\n", + ",SPRITE_DIR / \"Clouds.sprite3\" / \"0188b2c7c85176b462881c6bca7a7748.png\"\n", + ",SPRITE_DIR / \"Clouds.sprite3\" / \"9105d7dd90b5f2a4b85a1e71aff8703f.png\"\n", + ",SPRITE_DIR / \"Clouds.sprite3\" / \"9f2eccce13e3e5fd212efd59ff1d96a0.png\"\n", + ",SPRITE_DIR / \"Clouds.sprite3\" / \"9f5958f46d21e33d3f6d7caffbe0daa9.png\"\n", + ",SPRITE_DIR / \"Convertible 2.sprite3\" / \"621817ef84ad81f5690fac95adab2ede.png\"\n", + ",SPRITE_DIR / \"Convertible.sprite3\" / \"5b883f396844ff5cfecd7c95553fa4fb.png\"\n", + ",SPRITE_DIR / \"Crab.sprite3\" / \"49839aa1b0feed02a3c759db5f8dee71.png\"\n", + ",SPRITE_DIR / \"Crab.sprite3\" / \"f7cdd2acbc6d7559d33be8675059c79e.png\"\n", + ",SPRITE_DIR / \"Crystal.sprite3\" / \"0a7b872042cecaf30cc154c0144f002b.png\"\n", + ",SPRITE_DIR / \"Crystal.sprite3\" / \"ecd1e7805b37db4caf207b7eef2b7a42.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"05529eb3c09294bd15f57c6f10d5894e.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"12db59633a1709a2c39534d35263791f.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"19bd7995d37e3baade673b2fe7cb982b.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"32ec7b5332cfebd1cfed7f6b79c76e67.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"344384a6a3f1bdf494cc7af31e928d36.png\"\n", + ",SPRITE_DIR / \"D-Money 
Dance.sprite3\" / \"3cdebabdb41f6c3e84561cf3ea87bac3.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"50faf1630ea383c0b8c77f70a9329797.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"70da166596bb484eae1bfbaad5c03d54.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"729812366245c0dafd456339c9d94e08.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"a22da98e5e63de7b2883355afd0184f0.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"a4b5d644d9abdbcab236acf19b2a2e81.png\"\n", + ",SPRITE_DIR / \"D-Money Dance.sprite3\" / \"dafbdfe454c5ec7029b5c1e07fcabc90.png\"\n", + ",SPRITE_DIR / \"Dan.sprite3\" / \"307250744e230fb15e7062238bf2634c.png\"\n", + ",SPRITE_DIR / \"Dan.sprite3\" / \"89b55d049f4b3811676311df00681385.png\"\n", + ",SPRITE_DIR / \"Dani.sprite3\" / \"2cba86439098a7e0daa46e0ff8a59f7c.png\"\n", + ",SPRITE_DIR / \"Dani.sprite3\" / \"6518333c95cf96a9aaf73a4a948e002f.png\"\n", + ",SPRITE_DIR / \"Dani.sprite3\" / \"b5f989e21b56af371209369c331b821e.png\"\n", + ",SPRITE_DIR / \"Dee.sprite3\" / \"1de3bbee2771b0ff16c4658d5ad98b0b.png\"\n", + ",SPRITE_DIR / \"Dee.sprite3\" / \"320a892c86e9b039ba9d6d50a4897276.png\"\n", + ",SPRITE_DIR / \"Dee.sprite3\" / \"43bd4c241a94b3aea883472d7dab5afc.png\"\n", + ",SPRITE_DIR / \"Dee.sprite3\" / \"c57c4593701165cdea6de9b014c7c06d.png\"\n", + ",SPRITE_DIR / \"Dee.sprite3\" / \"e4c6ada3509f7033d14bac2c0eea49dc.png\"\n", + ",SPRITE_DIR / \"Devin.sprite3\" / \"5ab51aeaa296e955e75a7a3c103ebb99.png\"\n", + ",SPRITE_DIR / \"Devin.sprite3\" / \"5f614017dba0ce6bff063f6c62041035.png\"\n", + ",SPRITE_DIR / \"Devin.sprite3\" / \"9d7414a719d6cc5e0e9071ede200a29c.png\"\n", + ",SPRITE_DIR / \"Devin.sprite3\" / \"bfc7c20b64f86d4b207780f3da695fa4.png\"\n", + ",SPRITE_DIR / \"Dinosaur1.sprite3\" / \"22d94ee5daf557284465425a61186234.png\"\n", + ",SPRITE_DIR / \"Dinosaur1.sprite3\" / \"45b02fbd582c15a50e1953830b59b377.png\"\n", + ",SPRITE_DIR / \"Dinosaur1.sprite3\" / \"7f89417968116ada83d4ddaad22403b3.png\"\n", + ",SPRITE_DIR / \"Dinosaur1.sprite3\" / \"af158d368bf3da576369be1130e18acd.png\"\n", + ",SPRITE_DIR / \"Dinosaur2.sprite3\" / \"0e43f8e573bf232505b207b92efac2ac.png\"\n", + ",SPRITE_DIR / \"Dinosaur2.sprite3\" / \"7799f2848136d11f48ca5f3105d336ef.png\"\n", + ",SPRITE_DIR / \"Dinosaur2.sprite3\" / \"d926c5758d130fcfd9a7ae7dac47e47d.png\"\n", + ",SPRITE_DIR / \"Dinosaur2.sprite3\" / \"e606ba27dfe94daf3d8e3fdf599e37cf.png\"\n", + ",SPRITE_DIR / \"Dinosaur3.sprite3\" / \"5381feb0fc1b50ddc2793342daddffef.png\"\n", + ",SPRITE_DIR / \"Dinosaur3.sprite3\" / \"ae98efa1c3c3700602e1344db86aaf72.png\"\n", + ",SPRITE_DIR / \"Dinosaur3.sprite3\" / \"cf4fb77a4e9839f83d3fa5fc0982ccd3.png\"\n", + ",SPRITE_DIR / \"Dinosaur3.sprite3\" / \"d85ec1b97f73564ef26fec73d5056c68.png\"\n", + ",SPRITE_DIR / \"Dinosaur3.sprite3\" / \"e731d1f1ebf4bc0ea55b850ffe5a5f96.png\"\n", + ",SPRITE_DIR / \"Dinosaur4.sprite3\" / \"723bd1559f8baae4184fa24a6513362b.png\"\n", + ",SPRITE_DIR / \"Dinosaur4.sprite3\" / \"a98e3f93853513e7c00bab4c61752312.png\"\n", + ",SPRITE_DIR / \"Dinosaur4.sprite3\" / \"ac99ef62e3e018b8db550bb2a187cbe9.png\"\n", + ",SPRITE_DIR / \"Dinosaur4.sprite3\" / \"c63cca929380152b978d8671fe6003f7.png\"\n", + ",SPRITE_DIR / \"Dinosaur5.sprite3\" / \"26fca11e4251d60ed7aa5d08f4ae2a69.png\"\n", + ",SPRITE_DIR / \"Dinosaur5.sprite3\" / \"3b2cf97b1cc7fc535162ba5849a0e29c.png\"\n", + ",SPRITE_DIR / \"Dinosaur5.sprite3\" / \"42e3bf118c775ba54239af4276800a0a.png\"\n", + ",SPRITE_DIR / \"Dinosaur5.sprite3\" / \"5882227a9e2f0f3b2014c49328969762.png\"\n", + ",SPRITE_DIR 
/ \"Dinosaur5.sprite3\" / \"5a0832162a0cfa7adab6090c42e89714.png\"\n", + ",SPRITE_DIR / \"Dinosaur5.sprite3\" / \"9d200a7c2e93eac8cf52ede3a87d7969.png\"\n", + ",SPRITE_DIR / \"Dinosaur5.sprite3\" / \"c4044a3badea77ced4f2db69aff866ed.png\"\n", + ",SPRITE_DIR / \"Dinosaur5.sprite3\" / \"f49b3b098a24474f20c8f4686681c611.png\"\n", + ",SPRITE_DIR / \"Diver1.sprite3\" / \"a24f23a0f5d77cfb59721ef8f6bfe5c7.png\"\n", + ",SPRITE_DIR / \"Diver2.sprite3\" / \"ef8136a42b7d20961756e551bc87b37f.png\"\n", + ",SPRITE_DIR / \"Dog1.sprite3\" / \"35cd78a8a71546a16c530d0b2d7d5a7f.png\"\n", + ",SPRITE_DIR / \"Dog1.sprite3\" / \"d5a72e1eb23a91df4b53c0b16493d1e6.png\"\n", + ",SPRITE_DIR / \"Dog2.sprite3\" / \"4708bff29b3a295a03ac1d5e2d16ec75.png\"\n", + ",SPRITE_DIR / \"Dog2.sprite3\" / \"66b435d333f34d02d5ae49a598bcc5b3.png\"\n", + ",SPRITE_DIR / \"Dog2.sprite3\" / \"6afc06388d69f99e28d883126f9b2734.png\"\n", + ",SPRITE_DIR / \"Donut.sprite3\" / \"316a67c9e966fd015b4538f54be456db.png\"\n", + ",SPRITE_DIR / \"Dorian.sprite3\" / \"603d3dd151984c0eaa2822f70a234c28.png\"\n", + ",SPRITE_DIR / \"Dorian.sprite3\" / \"7d20ec98603857c031c1f4ad2bd8ea51.png\"\n", + ",SPRITE_DIR / \"Dorian.sprite3\" / \"8f2be2387efcbb5d4878886adaa2a88e.png\"\n", + ",SPRITE_DIR / \"Dorian.sprite3\" / \"a9a064a1f28c9e22b594dcea1d46025b.png\"\n", + ",SPRITE_DIR / \"Dot.sprite3\" / \"106461f60e34ce231b323e2dd2d9f05b.png\"\n", + ",SPRITE_DIR / \"Dot.sprite3\" / \"21482022f9930400302bc8ec70643717.png\"\n", + ",SPRITE_DIR / \"Dot.sprite3\" / \"9e5a6cc6970ce4932a09affba70a45b0.png\"\n", + ",SPRITE_DIR / \"Dot.sprite3\" / \"fb047c94113ee4c6664305a338525e6a.png\"\n", + ",SPRITE_DIR / \"Dove.sprite3\" / \"0f83ab55012a7affd94e38250d55a0a0.png\"\n", + ",SPRITE_DIR / \"Dove.sprite3\" / \"778a699a044a0a8c10f44c3194e21ef2.png\"\n", + ",SPRITE_DIR / \"Dragon.sprite3\" / \"12ead885460d96a19132e5970839d36d.png\"\n", + ",SPRITE_DIR / \"Dragon.sprite3\" / \"3f672475ad4ca5d1f9331cffd4223140.png\"\n", + ",SPRITE_DIR / \"Dragon.sprite3\" / \"e0aa0083fa0b97da97600d4dbb2055e5.png\"\n", + ",SPRITE_DIR / \"Dragonfly.sprite3\" / \"17b864c1ddd4b349a6c4bd5709167307.png\"\n", + ",SPRITE_DIR / \"Dragonfly.sprite3\" / \"5cdfe67af929e3fb095e83c9c4b0bd78.png\"\n", + ",SPRITE_DIR / \"Dress.sprite3\" / \"4e22e6fd72500f0a25b959283bfd0a32.png\"\n", + ",SPRITE_DIR / \"Dress.sprite3\" / \"c5fb135d89573570010b0d96c94bcec6.png\"\n", + ",SPRITE_DIR / \"Dress.sprite3\" / \"ddbea537af6012ebac18d16d65c07479.png\"\n", + ",SPRITE_DIR / \"Drum Kit.sprite3\" / \"3f4fb4836338c55f883607c403b2b25e.png\"\n", + ",SPRITE_DIR / \"Drum Kit.sprite3\" / \"baf6344b6f55b074786a383c1097697d.png\"\n", + ",SPRITE_DIR / \"Drum-cymbal.sprite3\" / \"08355ec8cc4b3263f502adfdea993cda.png\"\n", + ",SPRITE_DIR / \"Drum-cymbal.sprite3\" / \"78398692e6fa226568df0374c4358da4.png\"\n", + ",SPRITE_DIR / \"Drum-highhat.sprite3\" / \"15b2a31a57d0cd911ad0b1c265dcf59e.png\"\n", + ",SPRITE_DIR / \"Drum-highhat.sprite3\" / \"866b3a49ee2a45998940e2d737c4c502.png\"\n", + ",SPRITE_DIR / \"Drum-snare.sprite3\" / \"28298d93f5282041267a92bd67308107.png\"\n", + ",SPRITE_DIR / \"Drum-snare.sprite3\" / \"c42bb05aab3cacddcd88712e33ab8df0.png\"\n", + ",SPRITE_DIR / \"Drum.sprite3\" / \"47531b5675be696d0540eb120d5d0678.png\"\n", + ",SPRITE_DIR / \"Drum.sprite3\" / \"ce6971317035091341ec40571c9056e9.png\"\n", + ",SPRITE_DIR / \"Drums Conga.sprite3\" / \"2b2eacfce0fb1af023e6ca0f5ef6defe.png\"\n", + ",SPRITE_DIR / \"Drums Conga.sprite3\" / \"bdad2f140cfbd021f38241fc9acc7fd2.png\"\n", + ",SPRITE_DIR / \"Drums Tabla.sprite3\" / 
\"992d6359be830d977559dad91b04f698.png\"\n", + ",SPRITE_DIR / \"Drums Tabla.sprite3\" / \"af071d9d714c5c622e2bb07133698ce3.png\"\n", + ",SPRITE_DIR / \"Duck.sprite3\" / \"c9837d0454f5f0f73df290af2045359b.png\"\n", + ",SPRITE_DIR / \"Earth.sprite3\" / \"7405b5efa96995bae6853667f8cd145e.png\"\n", + ",SPRITE_DIR / \"Easel.sprite3\" / \"6a736beddc7844538be390c18b7c4361.png\"\n", + ",SPRITE_DIR / \"Easel.sprite3\" / \"a4b3714322c11b350f09a75921ae606b.png\"\n", + ",SPRITE_DIR / \"Easel.sprite3\" / \"caec09682a7fcdffef4647e8355ba004.png\"\n", + ",SPRITE_DIR / \"Egg.sprite3\" / \"0d127490af16f8a4ca5ce3212b2391c2.png\"\n", + ",SPRITE_DIR / \"Egg.sprite3\" / \"41535b4742f40e2630746b0c4bec98f2.png\"\n", + ",SPRITE_DIR / \"Egg.sprite3\" / \"b0b6e88ec64b842398200bab562b53e3.png\"\n", + ",SPRITE_DIR / \"Egg.sprite3\" / \"bb0505b802140a8cc200c9f8bfce4503.png\"\n", + ",SPRITE_DIR / \"Egg.sprite3\" / \"f8ee449298c1446cb0ef281923a4e57a.png\"\n", + ",SPRITE_DIR / \"Egg.sprite3\" / \"fbc629c3b062423e8c09cfacfb1e65f8.png\"\n", + ",SPRITE_DIR / \"Elephant.sprite3\" / \"2c9b5e0125d95b8bc511f6bb09b5ea2f.png\"\n", + ",SPRITE_DIR / \"Elephant.sprite3\" / \"b59873e9558c1c456200f50e5ab34770.png\"\n", + ",SPRITE_DIR / \"Elf.sprite3\" / \"524406c2b1fe253c1565ff516309817e.png\"\n", + ",SPRITE_DIR / \"Elf.sprite3\" / \"808c6fa2eb1cba0de1d17b18c6f41279.png\"\n", + ",SPRITE_DIR / \"Elf.sprite3\" / \"92ff640b911a8348d2734c0e38bba68c.png\"\n", + ",SPRITE_DIR / \"Elf.sprite3\" / \"e92abad171396a3198455df8557802e5.png\"\n", + ",SPRITE_DIR / \"Elf.sprite3\" / \"ec458328a85f89f06866e2337076ac0a.png\"\n", + ",SPRITE_DIR / \"Fairy.sprite3\" / \"40d726e17bfd2ffeb8c0aa5393ee1c77.png\"\n", + ",SPRITE_DIR / \"Fairy.sprite3\" / \"902350bba0d4b4612db1e2e902b6f201.png\"\n", + ",SPRITE_DIR / \"Fairy.sprite3\" / \"bea920473027f43e04c44e588c6cc39a.png\"\n", + ",SPRITE_DIR / \"Fairy.sprite3\" / \"d4f6163a1610243f55dd9cf1c9875c61.png\"\n", + ",SPRITE_DIR / \"Fairy.sprite3\" / \"decd31f829032b1d4dcf5efdbd362cb9.png\"\n", + ",SPRITE_DIR / \"Fish.sprite3\" / \"4a3478b3cdc3e8688a671be88c2775fd.png\"\n", + ",SPRITE_DIR / \"Fish.sprite3\" / \"7a0c31c0087f342867d4754f8dc57541.png\"\n", + ",SPRITE_DIR / \"Fish.sprite3\" / \"886e0bb732453eb8d3a849b4eab54943.png\"\n", + ",SPRITE_DIR / \"Fish.sprite3\" / \"a9b3d163756621f8395592ad77fb9369.png\"\n", + ",SPRITE_DIR / \"Fishbowl.sprite3\" / \"17c53cf0296f24722ba5b001d513e58f.png\"\n", + ",SPRITE_DIR / \"Fishbowl.sprite3\" / \"b3db01c5cda32fe3ea0b48dde5fa8130.png\"\n", + ",SPRITE_DIR / \"Food Truck.sprite3\" / \"a77f9693f87288d023a4632cf019776e.png\"\n", + ",SPRITE_DIR / \"Food Truck.sprite3\" / \"e850e3c93de767519f7f78b38f16ed1d.png\"\n", + ",SPRITE_DIR / \"Food Truck.sprite3\" / \"f4150de2297a63c3efd125c8e12dd7cc.png\"\n", + ",SPRITE_DIR / \"Football.sprite3\" / \"7ee31371b2eafba57cc5a78fc1a787fe.png\"\n", + ",SPRITE_DIR / \"Football.sprite3\" / \"c717def72c8bd98749284d31b51d7097.png\"\n", + ",SPRITE_DIR / \"Fortune Cookie.sprite3\" / \"c56dcaa1fa4e3c9740142b93d5982850.png\"\n", + ",SPRITE_DIR / \"Fox.sprite3\" / \"2c256eacbb753be361e8e52a0eefde77.png\"\n", + ",SPRITE_DIR / \"Fox.sprite3\" / \"9dd59a4514b5373d4f665db78e145636.png\"\n", + ",SPRITE_DIR / \"Fox.sprite3\" / \"dd398ed81edb60c91ad4805f4437d2fa.png\"\n", + ",SPRITE_DIR / \"Frank.sprite3\" / \"10d39bb7e31647a465e747cd243b8cd0.png\"\n", + ",SPRITE_DIR / \"Frank.sprite3\" / \"26da9617218493f4f42a1592f21afee8.png\"\n", + ",SPRITE_DIR / \"Frank.sprite3\" / \"d16b76a634f7367ce7d6112401a78e57.png\"\n", + ",SPRITE_DIR / \"Frank.sprite3\" / 
\"e56e930cc0229d1042a673e7503209c5.png\"\n", + ",SPRITE_DIR / \"Frog 2 .sprite3\" / \"0717f446c991aac7df2fe4d6590354e7.png\"\n", + ",SPRITE_DIR / \"Frog 2 .sprite3\" / \"d9f69469090784d8dd68d94c0fd78a50.png\"\n", + ",SPRITE_DIR / \"Frog 2 .sprite3\" / \"f2246c13e4540472c484119bc314d954.png\"\n", + ",SPRITE_DIR / \"Frog.sprite3\" / \"390845c11df0924f3b627bafeb3f814e.png\"\n", + ",SPRITE_DIR / \"Fruit Platter.sprite3\" / \"6c3252378da3334f63eebddbed3fae91.png\"\n", + ",SPRITE_DIR / \"Fruit Salad.sprite3\" / \"2e6ef315101433b78e38719e8cc630c2.png\"\n", + ",SPRITE_DIR / \"Ghost.sprite3\" / \"40ba3a0b5b3899a655fd8867229d4ee3.png\"\n", + ",SPRITE_DIR / \"Ghost.sprite3\" / \"634744e3f98bee53e9cb477a63aa9b21.png\"\n", + ",SPRITE_DIR / \"Ghost.sprite3\" / \"d1d89391f1d9c74557e504456d58a002.png\"\n", + ",SPRITE_DIR / \"Ghost.sprite3\" / \"f522b08c5757569ad289d67bce290cd0.png\"\n", + ",SPRITE_DIR / \"Gift.sprite3\" / \"0fdd104de718c5fc4a65da429468bdbd.png\"\n", + ",SPRITE_DIR / \"Gift.sprite3\" / \"6cbeda5d391c6d107f0b853222f344d9.png\"\n", + ",SPRITE_DIR / \"Giga Walking.sprite3\" / \"3afad833094d8dff1c4ff79edcaa13d0.png\"\n", + ",SPRITE_DIR / \"Giga Walking.sprite3\" / \"d27716e022fb5f747d7b09fe6eeeca06.png\"\n", + ",SPRITE_DIR / \"Giga Walking.sprite3\" / \"db55131bf54f96e8986d9b30730e42ce.png\"\n", + ",SPRITE_DIR / \"Giga.sprite3\" / \"337b338b2b10176221e638ac537854e6.png\"\n", + ",SPRITE_DIR / \"Giga.sprite3\" / \"92161a11e851ecda94cbbb985018fed6.png\"\n", + ",SPRITE_DIR / \"Giga.sprite3\" / \"bc706a7648342aaacac9050378b40c43.png\"\n", + ",SPRITE_DIR / \"Giga.sprite3\" / \"db15886cfdcb5e2f4459e9074e3990a1.png\"\n", + ",SPRITE_DIR / \"Giraffe.sprite3\" / \"43e89629fb9df7051eaf307c695424fc.png\"\n", + ",SPRITE_DIR / \"Giraffe.sprite3\" / \"cfd93a103479993aee4d680655e39d8d.png\"\n", + ",SPRITE_DIR / \"Giraffe.sprite3\" / \"ef1fca2ae13d49d9dd2c6cfc211a687c.png\"\n", + ",SPRITE_DIR / \"Glass Water.sprite3\" / \"ca70c69ef1f797d353581a3f76116ae3.png\"\n", + ",SPRITE_DIR / \"Glass Water.sprite3\" / \"cbf21cf1b057852f91135d27ebbf11ce.png\"\n", + ",SPRITE_DIR / \"Glasses.sprite3\" / \"705035328ac53d5ce1aa5a1ed1c2d172.png\"\n", + ",SPRITE_DIR / \"Glasses.sprite3\" / \"9e2f75d3a09f3f10d554ba8380c3ae52.png\"\n", + ",SPRITE_DIR / \"Glasses.sprite3\" / \"acd85b36e6b8d93ba4194ee2ea334207.png\"\n", + ",SPRITE_DIR / \"Glasses.sprite3\" / \"f2a02d0e7431147b8a4a282e02a8e6a4.png\"\n", + ",SPRITE_DIR / \"Glow-0.sprite3\" / \"64b59074f24d0e2405a509a45c0dadba.png\"\n", + ",SPRITE_DIR / \"Glow-1.sprite3\" / \"9f75c26aa6c56168a3e5a4f598de2c94.png\"\n", + ",SPRITE_DIR / \"Glow-2.sprite3\" / \"e8d8bf59db37b5012dd643a16a636042.png\"\n", + ",SPRITE_DIR / \"Glow-3.sprite3\" / \"57f7afe3b9888cca56803b73a62e4227.png\"\n", + ",SPRITE_DIR / \"Glow-4.sprite3\" / \"b8209e1980475b30ff11e60d7633446d.png\"\n", + ",SPRITE_DIR / \"Glow-5.sprite3\" / \"aacb5b3cec637f192f080138b4ccd8d2.png\"\n", + ",SPRITE_DIR / \"Glow-6.sprite3\" / \"84d9f26050c709e6b98706c22d2efb3d.png\"\n", + ",SPRITE_DIR / \"Glow-7.sprite3\" / \"6194b9a251a905d0001a969990961724.png\"\n", + ",SPRITE_DIR / \"Glow-8.sprite3\" / \"55e95fb9c60fbebb7d20bba99c7e9609.png\"\n", + ",SPRITE_DIR / \"Glow-9.sprite3\" / \"0f53ee6a988bda07cba561d38bfbc36f.png\"\n", + ",SPRITE_DIR / \"Glow-A.sprite3\" / \"fd470938cce54248aaf240b16e845456.png\"\n", + ",SPRITE_DIR / \"Glow-B.sprite3\" / \"a699fa024889b681d8b8b6c5c86acb6d.png\"\n", + ",SPRITE_DIR / \"Glow-C.sprite3\" / \"51b8a7dd7a8cddc5bc30e35824cc557a.png\"\n", + ",SPRITE_DIR / \"Glow-D.sprite3\" / 
\"a3a66e37de8d7ebe0505594e036ef6d1.png\"\n", + ",SPRITE_DIR / \"Glow-E.sprite3\" / \"80382a5db3fa556276068165c547b432.png\"\n", + ",SPRITE_DIR / \"Glow-F.sprite3\" / \"67239f7d47f7b92bc38e2d8b275d54ab.png\"\n", + ",SPRITE_DIR / \"Glow-G.sprite3\" / \"56839bc48957869d980c6f9b6f5a2a91.png\"\n", + ",SPRITE_DIR / \"Glow-H.sprite3\" / \"d6016c6494153cd5735ee4b6a1b05277.png\"\n", + ",SPRITE_DIR / \"Glow-I.sprite3\" / \"9077988af075c80cc403b1d6e5891528.png\"\n", + ",SPRITE_DIR / \"Glow-J.sprite3\" / \"6c359eff57abf5bb6db55894d08757c3.png\"\n", + ",SPRITE_DIR / \"Glow-K.sprite3\" / \"e932898d1e6fe3950a266fccaba0c3e6.png\"\n", + ",SPRITE_DIR / \"Glow-L.sprite3\" / \"dcee9202cf20e0395971f1ee73c45d37.png\"\n", + ",SPRITE_DIR / \"Glow-M.sprite3\" / \"26f81aa5990bf2371acaa8d76fe1e87f.png\"\n", + ",SPRITE_DIR / \"Glow-N.sprite3\" / \"d55a04ada14958eccc4aef446a4dad57.png\"\n", + ",SPRITE_DIR / \"Glow-O.sprite3\" / \"64b59074f24d0e2405a509a45c0dadba.png\"\n", + ",SPRITE_DIR / \"Glow-P.sprite3\" / \"c6edc2603ad4db3aa0b29f80e3e38cff.png\"\n", + ",SPRITE_DIR / \"Glow-Q.sprite3\" / \"e4ae18bf8b92ae375ce818d754588c76.png\"\n", + ",SPRITE_DIR / \"Glow-R.sprite3\" / \"bb11b49e19c68452331e78d51081ab42.png\"\n", + ",SPRITE_DIR / \"Glow-S.sprite3\" / \"6fd994b41bcf776fbf1f1521a879f1af.png\"\n", + ",SPRITE_DIR / \"Glow-T.sprite3\" / \"d687543649a676a14f408b5890d45f05.png\"\n", + ",SPRITE_DIR / \"Glow-U.sprite3\" / \"cb8ef2244400a57ba08e918cb4fe8bba.png\"\n", + ",SPRITE_DIR / \"Glow-V.sprite3\" / \"c6edc1ac2c5979f389598537cfb28096.png\"\n", + ",SPRITE_DIR / \"Glow-W.sprite3\" / \"2e0c2bb46c4ca3cf97779f749b1556f6.png\"\n", + ",SPRITE_DIR / \"Glow-X.sprite3\" / \"0b98a63dcc55251072a95a6c6bf7f6f2.png\"\n", + ",SPRITE_DIR / \"Glow-Y.sprite3\" / \"532494c9b5e6709f9982c00a48ce6870.png\"\n", + ",SPRITE_DIR / \"Glow-Z.sprite3\" / \"2d94d83dcc9ee3a107e5ea7ef0dddeb0.png\"\n", + ",SPRITE_DIR / \"Goalie.sprite3\" / \"59eedd0a23c3c983d386a0c125991c7f.png\"\n", + ",SPRITE_DIR / \"Goalie.sprite3\" / \"63f2955298d59dd22dc7b7c6a9c521e2.png\"\n", + ",SPRITE_DIR / \"Goalie.sprite3\" / \"a554f2a9b49a09ec67d1fd7ecfbcddcd.png\"\n", + ",SPRITE_DIR / \"Goalie.sprite3\" / \"eb096e2b4234f5f8ee1f2c44429eaa1a.png\"\n", + ",SPRITE_DIR / \"Goalie.sprite3\" / \"f2e7ba53f3a28c4359cb0d3e3cb4001a.png\"\n", + ",SPRITE_DIR / \"Goblin.sprite3\" / \"2add9ef4eaa25f8915406dcfd8bafc9f.png\"\n", + ",SPRITE_DIR / \"Goblin.sprite3\" / \"3f08380f25062b8055a1800f5dad14bd.png\"\n", + ",SPRITE_DIR / \"Goblin.sprite3\" / \"afb9fe328adae617ee3375366fca02e7.png\"\n", + ",SPRITE_DIR / \"Goblin.sprite3\" / \"b8604b8039d6b633015aaf17d74d5d5b.png\"\n", + ",SPRITE_DIR / \"Gobo.sprite3\" / \"5c0896569305ab177d87caa31aad2a72.png\"\n", + ",SPRITE_DIR / \"Gobo.sprite3\" / \"9d8021c216fb92cc708e1e96f3ed2b52.png\"\n", + ",SPRITE_DIR / \"Gobo.sprite3\" / \"f505a4e9eab5e40e2669a4462dba4c90.png\"\n", + ",SPRITE_DIR / \"Grasshopper.sprite3\" / \"529644c5ecdca63adafd87777e341ad7.png\"\n", + ",SPRITE_DIR / \"Grasshopper.sprite3\" / \"93550d8abde130ad149904c4448f8b65.png\"\n", + ",SPRITE_DIR / \"Grasshopper.sprite3\" / \"a7c638b8aa86f2a758830f8c2b0e4cf5.png\"\n", + ",SPRITE_DIR / \"Grasshopper.sprite3\" / \"cf2ac769df444137b4c1eec472fa4b92.png\"\n", + ",SPRITE_DIR / \"Grasshopper.sprite3\" / \"d4f3dfe69be6537e73544381408a820d.png\"\n", + ",SPRITE_DIR / \"Grasshopper.sprite3\" / \"e7210a370837dd1e4ebc1a56a973b7f6.png\"\n", + ",SPRITE_DIR / \"Green Flag.sprite3\" / \"2bbfd072183a67db5eddb923fe0726b3.png\"\n", + ",SPRITE_DIR / \"Griffin.sprite3\" / 
\"102f6200c13bd60afa9538c712776fb0.png\"\n", + ",SPRITE_DIR / \"Griffin.sprite3\" / \"157d3665cebcd41fa814b9217af99476.png\"\n", + ",SPRITE_DIR / \"Griffin.sprite3\" / \"a31166d45903206b52cb0f0a0cb687b5.png\"\n", + ",SPRITE_DIR / \"Griffin.sprite3\" / \"b8c8745820a341afec08e77f4a254551.png\"\n", + ",SPRITE_DIR / \"Guitar-electric1.sprite3\" / \"57c6d7dc148576cb2f36e53dea49260a.png\"\n", + ",SPRITE_DIR / \"Guitar-electric1.sprite3\" / \"677aed0b1168caf4b3ec565b9104dbe0.png\"\n", + ",SPRITE_DIR / \"Guitar-electric2.sprite3\" / \"83db2d0e342257e534ccdf0ec17bf668.png\"\n", + ",SPRITE_DIR / \"Guitar-electric2.sprite3\" / \"bb88e6a8a08a4034cc155b1137743ca1.png\"\n", + ",SPRITE_DIR / \"Guitar.sprite3\" / \"8704489dcf1a3ca93c5db40ebe5acd38.png\"\n", + ",SPRITE_DIR / \"Guitar.sprite3\" / \"e0423f4743f39456dade16fa1223d6b0.png\"\n", + ",SPRITE_DIR / \"Hannah.sprite3\" / \"5fdce07935156bbcf943793fa84e826c.png\"\n", + ",SPRITE_DIR / \"Hannah.sprite3\" / \"b983d99560313e38b4b3cd36cbd5f0d1.png\"\n", + ",SPRITE_DIR / \"Hannah.sprite3\" / \"d0c3b4b24fbf1152de3ebb68f6b875ae.png\"\n", + ",SPRITE_DIR / \"Hare.sprite3\" / \"7269593d83b6f9eae512997f541a7417.png\"\n", + ",SPRITE_DIR / \"Hare.sprite3\" / \"85a3b8c151e10576fa531a4293fdac00.png\"\n", + ",SPRITE_DIR / \"Hare.sprite3\" / \"c8dbb4302dd489a201938c203018c2f0.png\"\n", + ",SPRITE_DIR / \"Harper.sprite3\" / \"3a0973a042ee16e816c568651316d5d4.png\"\n", + ",SPRITE_DIR / \"Harper.sprite3\" / \"98ce6e6bb99f8ba116f127fdf2e739fd.png\"\n", + ",SPRITE_DIR / \"Harper.sprite3\" / \"e407fa0ed992393d12d0a108c11e2fa6.png\"\n", + ",SPRITE_DIR / \"Hat1 .sprite3\" / \"0aed53a86d92ec2283068000ac97a60b.png\"\n", + ",SPRITE_DIR / \"Hat1 .sprite3\" / \"13e382ae3f05a9a23e0b64ca23230438.png\"\n", + ",SPRITE_DIR / \"Hat1 .sprite3\" / \"6349e36da9897a2f89bdbf5c77dbdacb.png\"\n", + ",SPRITE_DIR / \"Hat1 .sprite3\" / \"c632719725400c604fcadf0858ce2b2c.png\"\n", + ",SPRITE_DIR / \"Hatchling.sprite3\" / \"0e5c295a043d5e183a98046e4f734b72.png\"\n", + ",SPRITE_DIR / \"Hatchling.sprite3\" / \"55f7d457eb0af78cb309ca47497c490f.png\"\n", + ",SPRITE_DIR / \"Hatchling.sprite3\" / \"f27d557be70a9522fae4392bfd4f5249.png\"\n", + ",SPRITE_DIR / \"Heart Candy.sprite3\" / \"288976865e8c5db717d859e915606d82.png\"\n", + ",SPRITE_DIR / \"Heart Candy.sprite3\" / \"3ee430ba825f41ae9913453d4932fb8b.png\"\n", + ",SPRITE_DIR / \"Heart Candy.sprite3\" / \"51248e76be2aa7a0f0ed77bc94af1b3a.png\"\n", + ",SPRITE_DIR / \"Heart Candy.sprite3\" / \"5fa8c4693cf8cba8cdbcbed72f4f58aa.png\"\n", + ",SPRITE_DIR / \"Heart Face.sprite3\" / \"989770846f8cd1628b48bbe91d0a7d0d.png\"\n", + ",SPRITE_DIR / \"Heart.sprite3\" / \"c77e640f6e023e7ce1e376da0f26e1eb.png\"\n", + ",SPRITE_DIR / \"Heart.sprite3\" / \"e24731f5cf2759c2f289921bebb86ea2.png\"\n", + ",SPRITE_DIR / \"Hedgehog.sprite3\" / \"1fcbba4a2252e96c52d2d8aa8e593e51.png\"\n", + ",SPRITE_DIR / \"Hedgehog.sprite3\" / \"3251533232e7f44315512149c7f76214.png\"\n", + ",SPRITE_DIR / \"Hedgehog.sprite3\" / \"3b0e1717859808cecf1a45e2a32dc201.png\"\n", + ",SPRITE_DIR / \"Hedgehog.sprite3\" / \"42bac40ca828133600e0a9f7ba019adb.png\"\n", + ",SPRITE_DIR / \"Hedgehog.sprite3\" / \"93c2d7a0abefaf26ee50d5038ac5bf61.png\"\n", + ",SPRITE_DIR / \"Hen.sprite3\" / \"6c9e05f568862dbcea0a1652a210239b.png\"\n", + ",SPRITE_DIR / \"Hen.sprite3\" / \"b02a33e32313cc9a75781a6fafd07033.png\"\n", + ",SPRITE_DIR / \"Hen.sprite3\" / \"c9a4570a2d0ae09b9feeeb5607e4b9c7.png\"\n", + ",SPRITE_DIR / \"Hen.sprite3\" / \"d055896a473bb12f4ec67af1fdb9c652.png\"\n", + ",SPRITE_DIR / \"Hippo1.sprite3\" / 
\"5764a2c650f225bc27cc0e6c5db401ea.png\"\n", + ",SPRITE_DIR / \"Hippo1.sprite3\" / \"911901dc568b56c15fe81819bc2af653.png\"\n", + ",SPRITE_DIR / \"Home Button.sprite3\" / \"1ebdcb9f033fa6658259b52da376b7ac.png\"\n", + ",SPRITE_DIR / \"Horse.sprite3\" / \"0e0fa871bea01c2dfb70e9955dc098be.png\"\n", + ",SPRITE_DIR / \"Horse.sprite3\" / \"ad458251c5bf5b375870829f1762fa47.png\"\n", + ",SPRITE_DIR / \"Jaime.sprite3\" / \"3ddc912edef87ae29121f57294fa0cb5.png\"\n", + ",SPRITE_DIR / \"Jaime.sprite3\" / \"4b9d2162e30dbb924840575ed35fddb0.png\"\n", + ",SPRITE_DIR / \"Jaime.sprite3\" / \"5883bdefba451aaeac8d77c798d41eb0.png\"\n", + ",SPRITE_DIR / \"Jaime.sprite3\" / \"5a683f4536abca0f83a77bc341df4c9a.png\"\n", + ",SPRITE_DIR / \"Jaime.sprite3\" / \"63e56d28cc3e3d9b735e1f1d51248cc0.png\"\n", + ",SPRITE_DIR / \"Jaime.sprite3\" / \"7fb579a98d6db257f1b16109d3c4609a.png\"\n", + ",SPRITE_DIR / \"Jaime.sprite3\" / \"d6cc9814f7a6640e4c2b1a4276987dc5.png\"\n", + ",SPRITE_DIR / \"Jamal.sprite3\" / \"2408318e743873c7254db1623441b9c5.png\"\n", + ",SPRITE_DIR / \"Jamal.sprite3\" / \"3c8d5e688450ad1e6bf024a32c55bcda.png\"\n", + ",SPRITE_DIR / \"Jamal.sprite3\" / \"693748d763c8da4b119a5e4bee6a1768.png\"\n", + ",SPRITE_DIR / \"Jamal.sprite3\" / \"92692e0c0f376797274392484ba74133.png\"\n", + ",SPRITE_DIR / \"Jar.sprite3\" / \"33b537168f3c2eb3dafeb739c22f38a6.png\"\n", + ",SPRITE_DIR / \"Jar.sprite3\" / \"e0f5ac773987470ff2467e3e01b9ab23.png\"\n", + ",SPRITE_DIR / \"Jellyfish.sprite3\" / \"00c99df84f8385038461d6c42a5465ab.png\"\n", + ",SPRITE_DIR / \"Jellyfish.sprite3\" / \"3158299771b3d34ed2c50a00fbab715e.png\"\n", + ",SPRITE_DIR / \"Jellyfish.sprite3\" / \"4e259b7c08f05145fc7800b33e4f356e.png\"\n", + ",SPRITE_DIR / \"Jellyfish.sprite3\" / \"5944a1e687fa31589517825b2144a17b.png\"\n", + ",SPRITE_DIR / \"Jordyn.sprite3\" / \"00c8c464c19460df693f8d5ae69afdab.png\"\n", + ",SPRITE_DIR / \"Jordyn.sprite3\" / \"768c4601174f0dfcb96b3080ccc3a192.png\"\n", + ",SPRITE_DIR / \"Jordyn.sprite3\" / \"a7cc1e5f02b58ecc8095cfc18eef0289.png\"\n", + ",SPRITE_DIR / \"Jordyn.sprite3\" / \"db4d97cbf24e2b8af665bfbf06f67fa0.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"01dd2f553c7262329ebaba2516e3a2b1.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"0ed4a09c41871d150c51119c1bceded2.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"2e2a6534d33883fdd2f8471a1adbebb7.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"3d3ea804243800981acabc7caba10939.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"6f68790ee3eb9bdccf8749305186b0dd.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"75ee2383fd83992b401c8a0730521d94.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"a12f40b18067bb31746f9cf461de88aa.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"a55fbb529c10f70bcb374aef8a63571b.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"a9fbc01a4124d555da12630312e46197.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"aabfedff0d11243386b6b0941e0f72e9.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"c2d5519e8a0f2214ff757117038c28dc.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"ea812b4c2b2405aa2b73158023298f71.png\"\n", + ",SPRITE_DIR / \"Jouvi Dance.sprite3\" / \"efaa8eb6c8cf7dc35d4d37d546ebd333.png\"\n", + ",SPRITE_DIR / \"Kai.sprite3\" / \"6e007fde15e49c66ee7996561f80b452.png\"\n", + ",SPRITE_DIR / \"Kai.sprite3\" / \"c1e1149f6d7e308e3e4eba14ccc8a751.png\"\n", + ",SPRITE_DIR / \"Key.sprite3\" / \"680d3e4dce002f922b32447fcf29743d.png\"\n", + ",SPRITE_DIR / \"Keyboard.sprite3\" / 
\"0ad880b5e829578832c8927b3f6ef7f8.png\"\n", + ",SPRITE_DIR / \"Keyboard.sprite3\" / \"6efd23c91dab070526feacdf72e2d3da.png\"\n", + ",SPRITE_DIR / \"Kia.sprite3\" / \"b3d0a248adbc26b0d0826e042a81670a.png\"\n", + ",SPRITE_DIR / \"Kia.sprite3\" / \"db6cd6b145bb6d8dc299475af7423d6e.png\"\n", + ",SPRITE_DIR / \"Kia.sprite3\" / \"e56e480c994572323d88355b8733e1a3.png\"\n", + ",SPRITE_DIR / \"Kiran.sprite3\" / \"2928e9fbd5ca08e326192b3a41bea691.png\"\n", + ",SPRITE_DIR / \"Kiran.sprite3\" / \"78bd6de23d4929aef678ddf0f3f5c276.png\"\n", + ",SPRITE_DIR / \"Kiran.sprite3\" / \"7912b6f378bd781f62683e003c574dbe.png\"\n", + ",SPRITE_DIR / \"Kiran.sprite3\" / \"7c0bedab5404830a5147cc4a2d46e997.png\"\n", + ",SPRITE_DIR / \"Kiran.sprite3\" / \"7f0bc123819fc2666321b6cd38069bdb.png\"\n", + ",SPRITE_DIR / \"Kiran.sprite3\" / \"b0566e0eed7b5216b92d61468d21ecee.png\"\n", + ",SPRITE_DIR / \"Knight.sprite3\" / \"188325c56b79ff3cd58497c970ba87a6.png\"\n", + ",SPRITE_DIR / \"Ladybug1.sprite3\" / \"169c0efa8c094fdedddf8c19c36f0229.png\"\n", + ",SPRITE_DIR / \"Ladybug2.sprite3\" / \"3f48228829b77fc47d6d89b5729b2957.png\"\n", + ",SPRITE_DIR / \"Ladybug2.sprite3\" / \"457200f8dec8fea00d22473e9bd9175e.png\"\n", + ",SPRITE_DIR / \"Laptop.sprite3\" / \"cd2d1f72275e676df5f82be74ae91dfa.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"0725440743391e7c622bb5df6a94e1d4.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"0a2461b3b9a4b8603e75565d78b1d4d7.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"4423159d81378ada5ffd7f053d7ef471.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"525285312925e1e6b4e237a119b61305.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"563f86443cb102b9241cebb62eb2d81a.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"5f176ef763be18f7c342dc2e2de7bf16.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"63d099e94aa8a973dcfa4c5d8b4a3e7a.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"645d6e2674452009df7a9a844a604791.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"71dde8c43985815bffb5a5ed5632af58.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"79ca528d13ffb557a236f0a35a0eb486.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"b508808c087adb55ce156f5cfbdac61b.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"cdd52259075b75628001672d375e4985.png\"\n", + ",SPRITE_DIR / \"LB Dance.sprite3\" / \"e68d899e178309ff3eae3e1de8a8ec28.png\"\n", + ",SPRITE_DIR / \"Lightning.sprite3\" / \"0ddd3a05a330925bcd2d048908ed40b8.png\"\n", + ",SPRITE_DIR / \"Line.sprite3\" / \"e85305b47cfd92d971704dcb7ad6e17b.png\"\n", + ",SPRITE_DIR / \"Lion.sprite3\" / \"91c64c5361d906fd36d5813ae27b85a8.png\"\n", + ",SPRITE_DIR / \"Lion.sprite3\" / \"e88e83c8b3ca80c54540b5f0c5a0cc03.png\"\n", + ",SPRITE_DIR / \"Lion.sprite3\" / \"f0d9ab3d82bbade6e279dc1c81e2e6db.png\"\n", + ",SPRITE_DIR / \"Llama.sprite3\" / \"1f3aaeb598e121ad817143800d8c4a32.png\"\n", + ",SPRITE_DIR / \"Llama.sprite3\" / \"ac80d75745315f052f7f7b4e62e4a850.png\"\n", + ",SPRITE_DIR / \"Llama.sprite3\" / \"c97824f20a45adfa3ff362f82247a025.png\"\n", + ",SPRITE_DIR / \"Luca.sprite3\" / \"18dfad514602a4907502c7c84861b24e.png\"\n", + ",SPRITE_DIR / \"Luca.sprite3\" / \"90fa2ad340edc6e6ba963710feef940e.png\"\n", + ",SPRITE_DIR / \"Luca.sprite3\" / \"963cb82687acaf5de53a22b287192723.png\"\n", + ",SPRITE_DIR / \"Magic Wand.sprite3\" / \"89aa5332042d7bbf8368293a4efeafa4.png\"\n", + ",SPRITE_DIR / \"Marian.sprite3\" / \"16893c6136292ae36e13dc72cc55719b.png\"\n", + ",SPRITE_DIR / \"Marian.sprite3\" / \"221e9999b20ecc21b37c68fcdf09ab02.png\"\n", + ",SPRITE_DIR 
/ \"Marian.sprite3\" / \"3d2ecee35eab8c37d1c3eadfe50ce447.png\"\n", + ",SPRITE_DIR / \"Marian.sprite3\" / \"64206b46c411e40926569cf3f5e587be.png\"\n", + ",SPRITE_DIR / \"Marian.sprite3\" / \"e9577a1eb098905dd386135bb38c0398.png\"\n", + ",SPRITE_DIR / \"Max.sprite3\" / \"5180649cfd62831c52f8994ce644d6ac.png\"\n", + ",SPRITE_DIR / \"Max.sprite3\" / \"6b91183a4ad162e4950d95828a85144d.png\"\n", + ",SPRITE_DIR / \"Max.sprite3\" / \"7b3d1324382032f87384ef2c8c618156.png\"\n", + ",SPRITE_DIR / \"Max.sprite3\" / \"9669ce16eb6c6df6f26686598a59711d.png\"\n", + ",SPRITE_DIR / \"Mermaid.sprite3\" / \"2a6274017350fab67ebec9157420ae96.png\"\n", + ",SPRITE_DIR / \"Mermaid.sprite3\" / \"65419296861b1c7ee59075af0f949d67.png\"\n", + ",SPRITE_DIR / \"Mermaid.sprite3\" / \"88a3b6b2f0b3ffa25cab97bc619f8386.png\"\n", + ",SPRITE_DIR / \"Mermaid.sprite3\" / \"f903049308e2171178d889f5c4a7d466.png\"\n", + ",SPRITE_DIR / \"Microphone.sprite3\" / \"c96578ffb9e314fee097862d69fde0af.png\"\n", + ",SPRITE_DIR / \"Microphone.sprite3\" / \"d4d80e94e2cc759b8ca1d7b58f2a9052.png\"\n", + ",SPRITE_DIR / \"Milk.sprite3\" / \"0f683f65c737bbcbb916df0895d8436e.png\"\n", + ",SPRITE_DIR / \"Milk.sprite3\" / \"1fa49d62f8028a375470e7bac451e666.png\"\n", + ",SPRITE_DIR / \"Milk.sprite3\" / \"4d3eabd3ef848b61c3120d796c274733.png\"\n", + ",SPRITE_DIR / \"Milk.sprite3\" / \"6ec300ae45758eff12e9d47cf4f0d2a0.png\"\n", + ",SPRITE_DIR / \"Milk.sprite3\" / \"aa5f1501805aa68d3ad74623f59e6135.png\"\n", + ",SPRITE_DIR / \"Monet.sprite3\" / \"137bbc522701a96908667d1b1730d041.png\"\n", + ",SPRITE_DIR / \"Monet.sprite3\" / \"138e6591f3317222521963ef3ce9a057.png\"\n", + ",SPRITE_DIR / \"Monet.sprite3\" / \"4c6b016c55c4348b6dce29ba99e7ede4.png\"\n", + ",SPRITE_DIR / \"Monet.sprite3\" / \"5b67cb843dcc9dabdc580b9e35e95659.png\"\n", + ",SPRITE_DIR / \"Monet.sprite3\" / \"740276a8aa9ddd12dd4b30f369975d66.png\"\n", + ",SPRITE_DIR / \"Monkey.sprite3\" / \"254926ee81bfa82f2db7009a80635061.png\"\n", + ",SPRITE_DIR / \"Monkey.sprite3\" / \"de0405b0576ade1282bdfcd198922baa.png\"\n", + ",SPRITE_DIR / \"Monkey.sprite3\" / \"ec6d62f0ff64bb5440ffdc662b6e46fa.png\"\n", + ",SPRITE_DIR / \"Motorcycle.sprite3\" / \"6e960b3c6a60ebe192e36b235c50ae03.png\"\n", + ",SPRITE_DIR / \"Motorcycle.sprite3\" / \"a70bdd403ace1f1ece2f2af0fbc3c720.png\"\n", + ",SPRITE_DIR / \"Motorcycle.sprite3\" / \"b73447c2577b8f77b5e2eb1da6d6445a.png\"\n", + ",SPRITE_DIR / \"Motorcycle.sprite3\" / \"c6f8179ff3e8f8ab08b01d50343eefc4.png\"\n", + ",SPRITE_DIR / \"Mouse1.sprite3\" / \"8a7da35c473972f88896ca73b7df2188.png\"\n", + ",SPRITE_DIR / \"Mouse1.sprite3\" / \"c5f76b65e30075c12d49ea8a8f7d6bad.png\"\n", + ",SPRITE_DIR / \"Muffin.sprite3\" / \"afa34381db44e699d61f774911aab448.png\"\n", + ",SPRITE_DIR / \"Muffin.sprite3\" / \"bd0581902cd6cc13888520776bf1620c.png\"\n", + ",SPRITE_DIR / \"Nano.sprite3\" / \"8f2f4a70e87262ef478ce60567b6208a.png\"\n", + ",SPRITE_DIR / \"Nano.sprite3\" / \"a4e2034751fa650fd5fd69432c110104.png\"\n", + ",SPRITE_DIR / \"Nano.sprite3\" / \"a62e560863c0e49b12e5d57e13d084f1.png\"\n", + ",SPRITE_DIR / \"Nano.sprite3\" / \"d12aead3e3c2917e7eba8b2b90a7afd2.png\"\n", + ",SPRITE_DIR / \"Neigh Pony.sprite3\" / \"592816f56409d582603c485cbefcbbb8.png\"\n", + ",SPRITE_DIR / \"Noor.sprite3\" / \"4cf233c6540e434aded60608ba316ce3.png\"\n", + ",SPRITE_DIR / \"Noor.sprite3\" / \"975585ca9461f0730a285fc96df73425.png\"\n", + ",SPRITE_DIR / \"Noor.sprite3\" / \"c1792bbd5970034b4595ff7e742d6e47.png\"\n", + ",SPRITE_DIR / \"Octopus.sprite3\" / \"5d6e17d6260134d0402ba487a419d7c3.png\"\n", + 
",SPRITE_DIR / \"Octopus.sprite3\" / \"7d33a531087188b29deae879f23f76bc.png\"\n", + ",SPRITE_DIR / \"Octopus.sprite3\" / \"9b5a2cd287229bf36ffcc176ed72cc0c.png\"\n", + ",SPRITE_DIR / \"Octopus.sprite3\" / \"e22d9b633feffc1d026980a1f21e07d7.png\"\n", + ",SPRITE_DIR / \"Octopus.sprite3\" / \"f582f162c4438d82c9e2a0a87a3e02ce.png\"\n", + ",SPRITE_DIR / \"Orange.sprite3\" / \"d0a55aae1decb57152b454c9a5226757.png\"\n", + ",SPRITE_DIR / \"Orange2.sprite3\" / \"27286ca08451bc512e1d611965dad061.png\"\n", + ",SPRITE_DIR / \"Orange2.sprite3\" / \"b823f73a31e61fd362574e2c24dfc0c2.png\"\n", + ",SPRITE_DIR / \"Outfielder.sprite3\" / \"10578b06f97b9fdc34f622e9e682c144.png\"\n", + ",SPRITE_DIR / \"Outfielder.sprite3\" / \"175ddc7ed99cc5b72909098046d8f558.png\"\n", + ",SPRITE_DIR / \"Outfielder.sprite3\" / \"9f31c772f88a5f32fe857d57b3bcb04c.png\"\n", + ",SPRITE_DIR / \"Outfielder.sprite3\" / \"d0a8837867d39444a824b734d4cd5554.png\"\n", + ",SPRITE_DIR / \"Owl.sprite3\" / \"236bb6b33e7db00834bcea89b03b8a5e.png\"\n", + ",SPRITE_DIR / \"Owl.sprite3\" / \"806139207066cb5eaef727d54c1bb4ec.png\"\n", + ",SPRITE_DIR / \"Owl.sprite3\" / \"a518f70b65ec489e709795209b43207a.png\"\n", + ",SPRITE_DIR / \"Paddle.sprite3\" / \"15864fac7d38bb94c1ec3a199de96c26.png\"\n", + ",SPRITE_DIR / \"Panther.sprite3\" / \"0e7c244f54b27058f8b17d9e0d3cee12.png\"\n", + ",SPRITE_DIR / \"Panther.sprite3\" / \"4a762fd04901407544d8858adac2b3fa.png\"\n", + ",SPRITE_DIR / \"Panther.sprite3\" / \"a7aee991f51636574625c1300f035bdd.png\"\n", + ",SPRITE_DIR / \"Pants.sprite3\" / \"ac9c7259873e472c2c1a99339c694f16.png\"\n", + ",SPRITE_DIR / \"Pants.sprite3\" / \"ef8b1576f183222a4c2d373a7bc194cc.png\"\n", + ",SPRITE_DIR / \"Parrot.sprite3\" / \"036fad20b674197358f8c0b2dc64e17e.png\"\n", + ",SPRITE_DIR / \"Parrot.sprite3\" / \"082f371c206f07d20e53595a9c69cc22.png\"\n", + ",SPRITE_DIR / \"Party Hats.sprite3\" / \"1d14be44e4aa99a471115cd874204690.png\"\n", + ",SPRITE_DIR / \"Party Hats.sprite3\" / \"8b43413906cf1ba1343580d3ca062048.png\"\n", + ",SPRITE_DIR / \"Party Hats.sprite3\" / \"abefb98344ece228afeb462f46d6b750.png\"\n", + ",SPRITE_DIR / \"Pencil.sprite3\" / \"b3d6eae85f285dd618bf9dcf609b9454.png\"\n", + ",SPRITE_DIR / \"Pencil.sprite3\" / \"f017876452a24d118fc0b1753caefad9.png\"\n", + ",SPRITE_DIR / \"Penguin 2.sprite3\" / \"280d2aa13f0c6774cc8828dc177aaf60.png\"\n", + ",SPRITE_DIR / \"Penguin 2.sprite3\" / \"428772307d90f4b347d6cc3c0d8e76ef.png\"\n", + ",SPRITE_DIR / \"Penguin 2.sprite3\" / \"780467f3d173dcb37fd65834841babc6.png\"\n", + ",SPRITE_DIR / \"Penguin 2.sprite3\" / \"d485f5620d2dde69a6aa1cda7c897d12.png\"\n", + ",SPRITE_DIR / \"Penguin.sprite3\" / \"6d11aedea7f316215aaa0d08617f4c31.png\"\n", + ",SPRITE_DIR / \"Penguin.sprite3\" / \"c434b674f2da18ba13cdfe51dbc05ecc.png\"\n", + ",SPRITE_DIR / \"Penguin.sprite3\" / \"dad5b0d82cb6e053d1ded2ef537a9453.png\"\n", + ",SPRITE_DIR / \"Pico Walking.sprite3\" / \"22fb16ae7cc18187a7adaf2852f07884.png\"\n", + ",SPRITE_DIR / \"Pico Walking.sprite3\" / \"52a60eccb624530fd3a24fc41fbad6e5.png\"\n", + ",SPRITE_DIR / \"Pico Walking.sprite3\" / \"702bd644d01ea8eda2ea122daeea7d74.png\"\n", + ",SPRITE_DIR / \"Pico Walking.sprite3\" / \"c8f58f31cabf4acabb3f828730061276.png\"\n", + ",SPRITE_DIR / \"Pico.sprite3\" / \"a7597b1f0c13455d335a3d4fe77da528.png\"\n", + ",SPRITE_DIR / \"Pico.sprite3\" / \"bcc0e8a5dda3a813608902b887c87bb4.png\"\n", + ",SPRITE_DIR / \"Pico.sprite3\" / \"d6dfa2efe58939af4c85755feb3c0375.png\"\n", + ",SPRITE_DIR / \"Pico.sprite3\" / \"e7ce31db37f7abd2901499db2e9ad83a.png\"\n", + 
",SPRITE_DIR / \"Pitcher.sprite3\" / \"049132404cb2cb157830aaf18aee6a24.png\"\n", + ",SPRITE_DIR / \"Pitcher.sprite3\" / \"ae8aa57ce6e5729d30d8b785bec97774.png\"\n", + ",SPRITE_DIR / \"Pitcher.sprite3\" / \"bceae719ba1ec230afec56f14a1e4d52.png\"\n", + ",SPRITE_DIR / \"Pitcher.sprite3\" / \"fc955dec7f1e97f1ddd9f8245a80907e.png\"\n", + ",SPRITE_DIR / \"Planet2.sprite3\" / \"50cde8a4a737da0eba1ab73eb263f836.png\"\n", + ",SPRITE_DIR / \"Polar Bear.sprite3\" / \"11d00a06abd2c882672464f4867e90b6.png\"\n", + ",SPRITE_DIR / \"Polar Bear.sprite3\" / \"5d7cd81aad80100368b8b77bf09ad576.png\"\n", + ",SPRITE_DIR / \"Polar Bear.sprite3\" / \"d050a3394b61ade080f7963c40192e7d.png\"\n", + ",SPRITE_DIR / \"Potion.sprite3\" / \"0eceab4561534dde827bf68233f47441.png\"\n", + ",SPRITE_DIR / \"Potion.sprite3\" / \"d922ffdfe38fd30fd8787810c6bce318.png\"\n", + ",SPRITE_DIR / \"Potion.sprite3\" / \"f8500e9530bf1136c6386f2a329519dd.png\"\n", + ",SPRITE_DIR / \"Prince.sprite3\" / \"ada9c5ce11245c467c780bceb665c42d.png\"\n", + ",SPRITE_DIR / \"Princess.sprite3\" / \"0721f5238a2bcde49d05f72ca9d21d9b.png\"\n", + ",SPRITE_DIR / \"Princess.sprite3\" / \"23330150c0a09180083b597cbfeca99a.png\"\n", + ",SPRITE_DIR / \"Princess.sprite3\" / \"39157d5d3280ab0b273260170d5436c2.png\"\n", + ",SPRITE_DIR / \"Princess.sprite3\" / \"ba37f578cc6cabce6fe4d2864c9eb96f.png\"\n", + ",SPRITE_DIR / \"Princess.sprite3\" / \"e59f55c86ea557bdbd88302012ce8db5.png\"\n", + ",SPRITE_DIR / \"Pufferfish.sprite3\" / \"1b4f39763c9848cc840522b95cc6d8ae.png\"\n", + ",SPRITE_DIR / \"Pufferfish.sprite3\" / \"2266c6bb2c3a8fb80783518a08852b4a.png\"\n", + ",SPRITE_DIR / \"Pufferfish.sprite3\" / \"b8aa1bd46eacc054c695b89167c3ad28.png\"\n", + ",SPRITE_DIR / \"Pufferfish.sprite3\" / \"e73e71718306f6c7085305dba142c315.png\"\n", + ",SPRITE_DIR / \"Puppy.sprite3\" / \"05630bfa94501a3e5d61ce443a0cea70.png\"\n", + ",SPRITE_DIR / \"Puppy.sprite3\" / \"2768d9e44a0aab055856d301bbc2b04e.png\"\n", + ",SPRITE_DIR / \"Puppy.sprite3\" / \"c4aeb5c39b39ef57a3f18ace54cf7db1.png\"\n", + ",SPRITE_DIR / \"Puppy.sprite3\" / \"c7817052ed9e78057f877d0d56b5c6a6.png\"\n", + ",SPRITE_DIR / \"Rabbit.sprite3\" / \"137976ec71439e2f986caeaa70e4c932.png\"\n", + ",SPRITE_DIR / \"Rabbit.sprite3\" / \"1ca3f829a2c9f7fa4d1df295fe5f787c.png\"\n", + ",SPRITE_DIR / \"Rabbit.sprite3\" / \"49169d752f20d27fb71022b16044d759.png\"\n", + ",SPRITE_DIR / \"Rabbit.sprite3\" / \"90677c6f16380ef077d6115f6a6371ff.png\"\n", + ",SPRITE_DIR / \"Rabbit.sprite3\" / \"970f886bfa454e1daa6d6c30ef49a972.png\"\n", + ",SPRITE_DIR / \"Radio.sprite3\" / \"828f0762d028605f6fe52f9287555b74.png\"\n", + ",SPRITE_DIR / \"Radio.sprite3\" / \"e96676f038fc523b40392dc1676552dc.png\"\n", + ",SPRITE_DIR / \"Rainbow.sprite3\" / \"033979eba12e4572b2520bd93a87583e.png\"\n", + ",SPRITE_DIR / \"Referee.sprite3\" / \"1cd641a48499db84636d983916b62a83.png\"\n", + ",SPRITE_DIR / \"Referee.sprite3\" / \"46dde2baba61a7e48463ae8e58441470.png\"\n", + ",SPRITE_DIR / \"Referee.sprite3\" / \"5948c4160089fcc0975a867221ff2256.png\"\n", + ",SPRITE_DIR / \"Referee.sprite3\" / \"7eeca5313c2e7d455482badff3079f64.png\"\n", + ",SPRITE_DIR / \"Reindeer.sprite3\" / \"60993a025167e7886736109dca5d55e2.png\"\n", + ",SPRITE_DIR / \"Retro Robot.sprite3\" / \"35070c1078c4eec153ea2769516c922c.png\"\n", + ",SPRITE_DIR / \"Retro Robot.sprite3\" / \"53398a713b144ecda6ec32fb4a8d28e1.png\"\n", + ",SPRITE_DIR / \"Retro Robot.sprite3\" / \"d139f89665962dcaab4cb2b246359ba1.png\"\n", + ",SPRITE_DIR / \"Ripley.sprite3\" / \"043373c51689f3df8bf50eb12c4e3d39.png\"\n", + 
",SPRITE_DIR / \"Ripley.sprite3\" / \"3ab169f52ea3783270d28ef035a5a7c5.png\"\n", + ",SPRITE_DIR / \"Ripley.sprite3\" / \"8e173178d886d1cb272877e8923d651b.png\"\n", + ",SPRITE_DIR / \"Ripley.sprite3\" / \"90feaffe3d0c4d31287d57bd1bc64afa.png\"\n", + ",SPRITE_DIR / \"Ripley.sprite3\" / \"e751d0a781694897f75046eb2810e9a5.png\"\n", + ",SPRITE_DIR / \"Ripley.sprite3\" / \"f798adaf44e8891c5e2f1b2a82a613b2.png\"\n", + ",SPRITE_DIR / \"Robot.sprite3\" / \"10060b3b58c77345cfe92288a46e5c20.png\"\n", + ",SPRITE_DIR / \"Robot.sprite3\" / \"36d1098b880dbe47e58d93e7b2842381.png\"\n", + ",SPRITE_DIR / \"Robot.sprite3\" / \"4f5441207afc9bc075b0b404dbba8b59.png\"\n", + ",SPRITE_DIR / \"Robot.sprite3\" / \"89679608327ad572b93225d06fe9edda.png\"\n", + ",SPRITE_DIR / \"Rocketship.sprite3\" / \"10f83786e5ee34f40ee43b49bba89ee2.png\"\n", + ",SPRITE_DIR / \"Rocketship.sprite3\" / \"49ee475c516a444d8a512724063b8b98.png\"\n", + ",SPRITE_DIR / \"Rocketship.sprite3\" / \"525c06ceb3a351244bcd810c9ba951c7.png\"\n", + ",SPRITE_DIR / \"Rocketship.sprite3\" / \"5682c68af2cc8aea791f0373e9ed03d8.png\"\n", + ",SPRITE_DIR / \"Rocketship.sprite3\" / \"a6ff2f1344a18cc0a4bcc945e00afaf4.png\"\n", + ",SPRITE_DIR / \"Rocks.sprite3\" / \"55426ccbb5c49b1526e53586943f3ec3.png\"\n", + ",SPRITE_DIR / \"Rooster.sprite3\" / \"0ae345deb1c81ec7f4f4644c26ac85fa.png\"\n", + ",SPRITE_DIR / \"Rooster.sprite3\" / \"6490360bd5d6efd2b646fb24c19df6b1.png\"\n", + ",SPRITE_DIR / \"Rooster.sprite3\" / \"bd5f701c99aa6512bac7b87c51e7cd46.png\"\n", + ",SPRITE_DIR / \"Ruby.sprite3\" / \"c30210e8f719c3a4d2c7cc6917a39300.png\"\n", + ",SPRITE_DIR / \"Ruby.sprite3\" / \"fc15fdbcc535473f6140cab28197f3be.png\"\n", + ",SPRITE_DIR / \"Sailboat.sprite3\" / \"ca241a938a2c44a0de6b91230012ff39.png\"\n", + ",SPRITE_DIR / \"Sam.sprite3\" / \"8208e99159b36c957fb9fbc187e51bc7.png\"\n", + ",SPRITE_DIR / \"Sasha.sprite3\" / \"89bb25e1465eb9481d267e4f9df592af.png\"\n", + ",SPRITE_DIR / \"Sasha.sprite3\" / \"a0b8890ce458aebed5e7002e1897508e.png\"\n", + ",SPRITE_DIR / \"Sasha.sprite3\" / \"e26bf53469cafd730ca150e745ceeafc.png\"\n", + ",SPRITE_DIR / \"Saxophone.sprite3\" / \"4414c51bdd03f60f40a1210e1d55cf57.png\"\n", + ",SPRITE_DIR / \"Saxophone.sprite3\" / \"459a64bebb7a788395c70e5369ab4746.png\"\n", + ",SPRITE_DIR / \"Scarf.sprite3\" / \"05b06ab8d2c6e2110896d70bb60a9fd7.png\"\n", + ",SPRITE_DIR / \"Scarf.sprite3\" / \"213db212d5d0c602f85cb248719ce785.png\"\n", + ",SPRITE_DIR / \"Scarf.sprite3\" / \"4a85e4e6232f12abf9802bec4aa419b3.png\"\n", + ",SPRITE_DIR / \"Shark 2.sprite3\" / \"6182a0628eadf2d16624864bea964432.png\"\n", + ",SPRITE_DIR / \"Shark 2.sprite3\" / \"7f4440b268358417aa79ccef06877c57.png\"\n", + ",SPRITE_DIR / \"Shark 2.sprite3\" / \"8a8d551e951087050cfa88fc64f9b4db.png\"\n", + ",SPRITE_DIR / \"Shark.sprite3\" / \"6c8008ae677ec51af8da5023fa2cd521.png\"\n", + ",SPRITE_DIR / \"Shark.sprite3\" / \"b769db8fcbbf2609f0552db62ec1f94a.png\"\n", + ",SPRITE_DIR / \"Shirt.sprite3\" / \"43e916bbe0ba7cecd08407d25ac3d104.png\"\n", + ",SPRITE_DIR / \"Shoes.sprite3\" / \"1e813a1618f38212a6febaa7e6b8d712.png\"\n", + ",SPRITE_DIR / \"Shoes.sprite3\" / \"71b5a444d482455e9956cfd52d20526a.png\"\n", + ",SPRITE_DIR / \"Shoes.sprite3\" / \"724d9a8984279949ce452fc9b2e437a6.png\"\n", + ",SPRITE_DIR / \"Shoes.sprite3\" / \"f89f1656251248f1591aa67ae946c047.png\"\n", + ",SPRITE_DIR / \"Shorts.sprite3\" / \"4d5f7a13ed20dc4f8fd194a7eb3f625f.png\"\n", + ",SPRITE_DIR / \"Shorts.sprite3\" / \"d5fc56b7247f079e5821d74d3e91e7a6.png\"\n", + ",SPRITE_DIR / \"Shorts.sprite3\" / 
\"ea78ad682811f9c42731ec648ec7af3c.png\"\n", + ",SPRITE_DIR / \"Singer1.sprite3\" / \"d6ff94dc7e24200c28015ee5d6373140.png\"\n", + ",SPRITE_DIR / \"Skeleton.sprite3\" / \"3cfff37072a4138b977ba406c290b419.png\"\n", + ",SPRITE_DIR / \"Skeleton.sprite3\" / \"67108e6b1d0f41aba2f94f81114ebf59.png\"\n", + ",SPRITE_DIR / \"Skeleton.sprite3\" / \"c4d755c672a0826caa7b6fb767cc3f9b.png\"\n", + ",SPRITE_DIR / \"Skeleton.sprite3\" / \"f4a00b2bd214b1d8412a2e89b2030354.png\"\n", + ",SPRITE_DIR / \"Snake.sprite3\" / \"42519e0ee19d75def88a514d3c49ce37.png\"\n", + ",SPRITE_DIR / \"Snake.sprite3\" / \"a0acb49efdf60b20cea0833eeedd44a1.png\"\n", + ",SPRITE_DIR / \"Snake.sprite3\" / \"f0e6ebdbdc8571b42f8a48cc2aed3042.png\"\n", + ",SPRITE_DIR / \"Snowflake.sprite3\" / \"083735cc9cd0e6d8c3dbab5ab9ee5407.png\"\n", + ",SPRITE_DIR / \"Snowman.sprite3\" / \"0f109df620f935b94cb154101e6586d4.png\"\n", + ",SPRITE_DIR / \"Soccer Ball.sprite3\" / \"5d973d7a3a8be3f3bd6e1cd0f73c32b5.png\"\n", + ",SPRITE_DIR / \"Speaker.sprite3\" / \"697f6becae5321f77990636564ef0c97.png\"\n", + ",SPRITE_DIR / \"Squirrel.sprite3\" / \"b86efb7f23387300cf9037a61f328ab9.png\"\n", + ",SPRITE_DIR / \"Star.sprite3\" / \"551629f2a64c1f3703e57aaa133effa6.png\"\n", + ",SPRITE_DIR / \"Starfish.sprite3\" / \"69dca6e42d45d3fef89f81de40b11bef.png\"\n", + ",SPRITE_DIR / \"Starfish.sprite3\" / \"be2ca55a5688670302e7c3f79d5040d1.png\"\n", + ",SPRITE_DIR / \"Stop.sprite3\" / \"1e2c3987e4cdb1f317b1773662719b13.png\"\n", + ",SPRITE_DIR / \"Story-A.sprite3\" / \"3c46f5192d2c29f957381e0100c6085d.png\"\n", + ",SPRITE_DIR / \"Story-A.sprite3\" / \"4b1beecd9a8892df0918242b2b5fbd4c.png\"\n", + ",SPRITE_DIR / \"Story-A.sprite3\" / \"7a6fdf5e26fc690879f8e215bfdec4d5.png\"\n", + ",SPRITE_DIR / \"Story-B.sprite3\" / \"22817ed2e4253787c78d7b696bbefdc1.png\"\n", + ",SPRITE_DIR / \"Story-B.sprite3\" / \"5f8301434ce176ab328f5b658ee1ec05.png\"\n", + ",SPRITE_DIR / \"Story-B.sprite3\" / \"a09376e1eacf17be3c9fbd268674b9f7.png\"\n", + ",SPRITE_DIR / \"Story-C.sprite3\" / \"5e61610cbba50ba86f18830f61bbaecb.png\"\n", + ",SPRITE_DIR / \"Story-C.sprite3\" / \"6bd5cb8bc3e4df5e055f4c56dd630855.png\"\n", + ",SPRITE_DIR / \"Story-C.sprite3\" / \"f6ff602902affbae2f89b389f08df432.png\"\n", + ",SPRITE_DIR / \"Story-D.sprite3\" / \"130cc4b9ad8dd8936d22c51c05ac6860.png\"\n", + ",SPRITE_DIR / \"Story-D.sprite3\" / \"b28d76f648ad24932a18cb40c8d76bc5.png\"\n", + ",SPRITE_DIR / \"Story-D.sprite3\" / \"dd713e3bf42d7a4fd8d2f12094db1c63.png\"\n", + ",SPRITE_DIR / \"Story-E.sprite3\" / \"3005df22798da45f1daf1de7421bb91d.png\"\n", + ",SPRITE_DIR / \"Story-E.sprite3\" / \"4e903ac41a7e16a52efff8477f2398c7.png\"\n", + ",SPRITE_DIR / \"Story-E.sprite3\" / \"add5c5a8eec67eb010b5cbd44dea5c8d.png\"\n", + ",SPRITE_DIR / \"Story-F.sprite3\" / \"4a3ae31dd3dd3b96239a0307cfdaa1b6.png\"\n", + ",SPRITE_DIR / \"Story-F.sprite3\" / \"83565581ecc9f7d4010efd8683a99393.png\"\n", + ",SPRITE_DIR / \"Story-F.sprite3\" / \"d4ec9a1827429f4e2f3dc239dcc15b95.png\"\n", + ",SPRITE_DIR / \"Story-G.sprite3\" / \"648cfdd48a7f748e6198194669ba1909.png\"\n", + ",SPRITE_DIR / \"Story-G.sprite3\" / \"85144902cc61fe98dca513b74276d7d8.png\"\n", + ",SPRITE_DIR / \"Story-G.sprite3\" / \"8fb61932544adbe8c95b067ad1351758.png\"\n", + ",SPRITE_DIR / \"Story-H.sprite3\" / \"70520daa9f82a2347c8a8fa9e7fe1a6e.png\"\n", + ",SPRITE_DIR / \"Story-H.sprite3\" / \"99aae97a2b49904db7eeb813fa968582.png\"\n", + ",SPRITE_DIR / \"Story-H.sprite3\" / \"eec286b1cfea3f219a5b486931abedd2.png\"\n", + ",SPRITE_DIR / \"Story-I.sprite3\" / 
\"1bceea90292a51a7177abf581f28bf2c.png\"\n", + ",SPRITE_DIR / \"Story-I.sprite3\" / \"2c156e20da1ad4e8e397a89ad8fb1c26.png\"\n", + ",SPRITE_DIR / \"Story-I.sprite3\" / \"9cad752323aa81dfa8d8cf009057b108.png\"\n", + ",SPRITE_DIR / \"Story-J.sprite3\" / \"2838de5d131785c985eb0eab25ec63af.png\"\n", + ",SPRITE_DIR / \"Story-J.sprite3\" / \"7d7d6f257a6bf3668a0befa4199f16a0.png\"\n", + ",SPRITE_DIR / \"Story-J.sprite3\" / \"d5b58ddd6f6b4fdcfdfd86d102853935.png\"\n", + ",SPRITE_DIR / \"Story-K.sprite3\" / \"0cb908dbc38635cc595e6060afc1b682.png\"\n", + ",SPRITE_DIR / \"Story-K.sprite3\" / \"17ef8f63a2a8f47258bd62cf642fd8d6.png\"\n", + ",SPRITE_DIR / \"Story-K.sprite3\" / \"ecf86afea23fd95e27d4e63659adbfa6.png\"\n", + ",SPRITE_DIR / \"Story-L.sprite3\" / \"0fc3ac08468935694255ef8a461d4d26.png\"\n", + ",SPRITE_DIR / \"Story-L.sprite3\" / \"935c7cf21c35523c0a232013a6399a49.png\"\n", + ",SPRITE_DIR / \"Story-L.sprite3\" / \"ec4d85a60c32c7637de31dbf503266a0.png\"\n", + ",SPRITE_DIR / \"Story-M.sprite3\" / \"42e5468fa164e001925d5a49d372f4b1.png\"\n", + ",SPRITE_DIR / \"Story-M.sprite3\" / \"643896fcad0a1bf6eb9f3f590094687c.png\"\n", + ",SPRITE_DIR / \"Story-M.sprite3\" / \"9bf9e677da34528433d3c1acb945e2df.png\"\n", + ",SPRITE_DIR / \"Story-N.sprite3\" / \"40ffad793f4042a5fe7b3aaa6bc175ae.png\"\n", + ",SPRITE_DIR / \"Story-N.sprite3\" / \"80c8f32282b697097933837905a6f257.png\"\n", + ",SPRITE_DIR / \"Story-N.sprite3\" / \"c2f77473dd16d1a3713218b05390a688.png\"\n", + ",SPRITE_DIR / \"Story-O.sprite3\" / \"0bdd31ea2b3b78d0c39022795a49c69a.png\"\n", + ",SPRITE_DIR / \"Story-O.sprite3\" / \"40bf3880b678beeda8cf708a51a4402d.png\"\n", + ",SPRITE_DIR / \"Story-O.sprite3\" / \"43a89fc1442627ca48b1dc631c517942.png\"\n", + ",SPRITE_DIR / \"Story-P.sprite3\" / \"1a41f74cd76d7202d8b22ffc7729e03f.png\"\n", + ",SPRITE_DIR / \"Story-P.sprite3\" / \"377eac55366670a03c469705c6689f09.png\"\n", + ",SPRITE_DIR / \"Story-P.sprite3\" / \"9cf707e83af27c47e74adb77496ffca5.png\"\n", + ",SPRITE_DIR / \"Story-Q.sprite3\" / \"01acd1076994a4379a3fc9e034bc05fc.png\"\n", + ",SPRITE_DIR / \"Story-Q.sprite3\" / \"84a6dc992bce018a1eac9be0173ad917.png\"\n", + ",SPRITE_DIR / \"Story-Q.sprite3\" / \"efc27a91c30d6a511be4245e36684192.png\"\n", + ",SPRITE_DIR / \"Story-R.sprite3\" / \"3c3f44aba3eff8856472e06b333a7201.png\"\n", + ",SPRITE_DIR / \"Story-R.sprite3\" / \"4f217b14a161fcd9590614b0733100ea.png\"\n", + ",SPRITE_DIR / \"Story-R.sprite3\" / \"5c1d38d02ae9c4df7851a6e9d52f25b4.png\"\n", + ",SPRITE_DIR / \"Story-S.sprite3\" / \"47b9f910048ce4db93bdfbcd2638e19a.png\"\n", + ",SPRITE_DIR / \"Story-S.sprite3\" / \"5a113fcacd35ababbf23c5a9289433d1.png\"\n", + ",SPRITE_DIR / \"Story-S.sprite3\" / \"fd2a94481c3ef0c223784b2f3c6df874.png\"\n", + ",SPRITE_DIR / \"Story-T.sprite3\" / \"001a2186db228fdd9bfbf3f15800bb63.png\"\n", + ",SPRITE_DIR / \"Story-T.sprite3\" / \"66b22b0ff0a5c1c205a701316ab954cf.png\"\n", + ",SPRITE_DIR / \"Story-T.sprite3\" / \"b61e1ac30aa2f35d4fd8c23fab1f76ea.png\"\n", + ",SPRITE_DIR / \"Story-U.sprite3\" / \"51dd73c840ba3aca0f9770e13cb14fb3.png\"\n", + ",SPRITE_DIR / \"Story-U.sprite3\" / \"cfb334b977b8f2a39aa56b1e0532829e.png\"\n", + ",SPRITE_DIR / \"Story-U.sprite3\" / \"f6b7b4da5362fdac29d84f1fbf19e3f4.png\"\n", + ",SPRITE_DIR / \"Story-V.sprite3\" / \"43a8993221848f90e9f37664e7832b4a.png\"\n", + ",SPRITE_DIR / \"Story-V.sprite3\" / \"d5c20886e3eb0ca0f5430c9482b1d832.png\"\n", + ",SPRITE_DIR / \"Story-V.sprite3\" / \"f27e7a4216665a6eab43fe9b4b5ec934.png\"\n", + ",SPRITE_DIR / \"Story-W.sprite3\" / 
\"396e27d20d1a49edaa106ba6d667cedd.png\"\n", + ",SPRITE_DIR / \"Story-W.sprite3\" / \"528df57da4490f6da8c75da06a1367f5.png\"\n", + ",SPRITE_DIR / \"Story-W.sprite3\" / \"f21ba826cd88c376e868f079d6df273c.png\"\n", + ",SPRITE_DIR / \"Story-X.sprite3\" / \"04be1176e562eff16f1159f69945a82e.png\"\n", + ",SPRITE_DIR / \"Story-X.sprite3\" / \"ca4e3e84788bdeea42dd5ed952d5a66c.png\"\n", + ",SPRITE_DIR / \"Story-X.sprite3\" / \"db0c1a6499169aac6639a1a0076658ce.png\"\n", + ",SPRITE_DIR / \"Story-Y.sprite3\" / \"093a9410933f7d01f459f08bcb01735b.png\"\n", + ",SPRITE_DIR / \"Story-Y.sprite3\" / \"59275f907633ce02074f787e5767bfde.png\"\n", + ",SPRITE_DIR / \"Story-Y.sprite3\" / \"d7fabe2652c93dd1bf91d9064cf5a348.png\"\n", + ",SPRITE_DIR / \"Story-Z.sprite3\" / \"23c24dbee23b1545afa8ee15ed339327.png\"\n", + ",SPRITE_DIR / \"Story-Z.sprite3\" / \"34825a171f7b35962484fa53e99ff632.png\"\n", + ",SPRITE_DIR / \"Story-Z.sprite3\" / \"665db4c356d7e010fa8d71cc291834e3.png\"\n", + ",SPRITE_DIR / \"Strawberry.sprite3\" / \"10ed1486ff4bab3eebb3b8ae55d81ccd.png\"\n", + ",SPRITE_DIR / \"Strawberry.sprite3\" / \"2fa57942dc7ded7eddc4d41554768d67.png\"\n", + ",SPRITE_DIR / \"Strawberry.sprite3\" / \"662279c12965d2913a060a55aebec496.png\"\n", + ",SPRITE_DIR / \"Strawberry.sprite3\" / \"aa4eae20c750900e4f63e6ede4083d81.png\"\n", + ",SPRITE_DIR / \"Strawberry.sprite3\" / \"f5008785e74590689afca4b578d108a4.png\"\n", + ",SPRITE_DIR / \"Sun.sprite3\" / \"406808d86aff20a15d592b308e166a32.png\"\n", + ",SPRITE_DIR / \"Sunglasses1.sprite3\" / \"c95a05c3bed665027d267d93454c428a.png\"\n", + ",SPRITE_DIR / \"Sunglasses1.sprite3\" / \"dc568ae1f8b9b6544f0634ef975a7098.png\"\n", + ",SPRITE_DIR / \"Taco.sprite3\" / \"383ea1ef802bc2706670536cfa8271b7.png\"\n", + ",SPRITE_DIR / \"Taco.sprite3\" / \"c97113d17afeaac9f461ea0ec257ef26.png\"\n", + ",SPRITE_DIR / \"Takeout.sprite3\" / \"24cc271fd6cf55f25b71e78faf749a98.png\"\n", + ",SPRITE_DIR / \"Takeout.sprite3\" / \"2b32d6a4a724c38bfaeb494d30827f19.png\"\n", + ",SPRITE_DIR / \"Takeout.sprite3\" / \"40f63eb18230c4defa9051830beffb0f.png\"\n", + ",SPRITE_DIR / \"Takeout.sprite3\" / \"9202a59888545c56c864bacb700c4297.png\"\n", + ",SPRITE_DIR / \"Takeout.sprite3\" / \"e03cd6e668e0eeddb2da98a095e2f30f.png\"\n", + ",SPRITE_DIR / \"Tatiana.sprite3\" / \"5cf65a9f942ca92c93915527ff9db1e6.png\"\n", + ",SPRITE_DIR / \"Tatiana.sprite3\" / \"91fb7d056beaf553ccec03d61d72c545.png\"\n", + ",SPRITE_DIR / \"Tatiana.sprite3\" / \"e207fd3f99e1db8c5d66f49446f27e37.png\"\n", + ",SPRITE_DIR / \"Tatiana.sprite3\" / \"e2ea6bbc6066574d4836e808a1c5f849.png\"\n", + ",SPRITE_DIR / \"Taylor.sprite3\" / \"a504d785629f2d1ca6b87e80b334d5e8.png\"\n", + ",SPRITE_DIR / \"Taylor.sprite3\" / \"ae2eaae0882543dc276c8e7d56ff2e7b.png\"\n", + ",SPRITE_DIR / \"Taylor.sprite3\" / \"e0082f49fc5d0d83d7fad6124ba82bb1.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"279bd5499329f98a68cf92c68014e198.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"377b8521c436f4f39ed2100fa1cb7c2f.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"3c9a7eac1d696ae74ee40c6efa8fa4dd.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"548bdf23904e409c1fcc0992f44d0b4c.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"580fba92f23d5592200eb5a9079dc38f.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"8313a2229d555bbdb8ce92dffed067ad.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"86602007ae2952236d47d7fd587a56b6.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"b2f75ac1cd84615efaea6a7d7a4ee205.png\"\n", + ",SPRITE_DIR / 
\"Ten80 Dance.sprite3\" / \"ce2141ce97921ddc333bc65ff5bec27d.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"e06ac61e96e3a5abf4ca0863816f5d28.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"e51942bb4651e616549cfce1ad36ff83.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"f60f99278455c843b7833fb7615428dd.png\"\n", + ",SPRITE_DIR / \"Ten80 Dance.sprite3\" / \"fea7045c09073700b88fae8d4d257cd1.png\"\n", + ",SPRITE_DIR / \"Tennis Ball.sprite3\" / \"34fa36004be0340ec845ba6bbeb5e5d5.png\"\n", + ",SPRITE_DIR / \"Tera.sprite3\" / \"18f9a11ecdbd3ad8719beb176c484d41.png\"\n", + ",SPRITE_DIR / \"Tera.sprite3\" / \"2daca5f43efc2d29fb089879448142e9.png\"\n", + ",SPRITE_DIR / \"Tera.sprite3\" / \"365d4de6c99d71f1370f7c5e636728af.png\"\n", + ",SPRITE_DIR / \"Tera.sprite3\" / \"5456a723f3b35eaa946b974a59888793.png\"\n", + ",SPRITE_DIR / \"Toucan.sprite3\" / \"72952d831d0b67c9d056b44a4bc3d0ae.png\"\n", + ",SPRITE_DIR / \"Toucan.sprite3\" / \"9eef2e49b3bbf371603ae783cd82db3c.png\"\n", + ",SPRITE_DIR / \"Toucan.sprite3\" / \"b6345d7386021ee85bb17f8aa4950eed.png\"\n", + ",SPRITE_DIR / \"Trampoline.sprite3\" / \"8fa3c6fcff2f25f5fe7842d68dcfe5cf.png\"\n", + ",SPRITE_DIR / \"Tree1.sprite3\" / \"d04b15886635101db8220a4361c0c88d.png\"\n", + ",SPRITE_DIR / \"Trees.sprite3\" / \"04758bd432a8b1cab527bddf14432147.png\"\n", + ",SPRITE_DIR / \"Trees.sprite3\" / \"551b3fae8eab06b49013f54009a7767a.png\"\n", + ",SPRITE_DIR / \"Trisha.sprite3\" / \"2d06023ec09ec312ab49055530511134.png\"\n", + ",SPRITE_DIR / \"Trisha.sprite3\" / \"55d31103bc86447c6a727b4f0664a5ea.png\"\n", + ",SPRITE_DIR / \"Trisha.sprite3\" / \"c31dc8487a841f644889784ff437e2c5.png\"\n", + ",SPRITE_DIR / \"Truck.sprite3\" / \"63b00424bdabc3459e5bc554c6c21e06.png\"\n", + ",SPRITE_DIR / \"Truck.sprite3\" / \"aaa05abc5aa182a0d7bfdc6db0f3207a.png\"\n", + ",SPRITE_DIR / \"Truck.sprite3\" / \"ce077e6db3573062017f94c2e4a8caea.png\"\n", + ",SPRITE_DIR / \"Trumpet.sprite3\" / \"47a1ec267505be96b678df30b92ec534.png\"\n", + ",SPRITE_DIR / \"Trumpet.sprite3\" / \"9a5c211622d6d2fed600c1809fccd21d.png\"\n", + ",SPRITE_DIR / \"Unicorn 2.sprite3\" / \"dcbeac8e856c9ddd6c457376be6573c8.png\"\n", + ",SPRITE_DIR / \"Unicorn Running.sprite3\" / \"1fb3d038e985c01899881bc5bb373c16.png\"\n", + ",SPRITE_DIR / \"Unicorn Running.sprite3\" / \"4709966d11b37e8a11d24c800e8b2859.png\"\n", + ",SPRITE_DIR / \"Unicorn Running.sprite3\" / \"8feaeec435125227c675dd95f69ff835.png\"\n", + ",SPRITE_DIR / \"Unicorn Running.sprite3\" / \"e111350b8bedefffee0d5e7e2490d446.png\"\n", + ",SPRITE_DIR / \"Unicorn Running.sprite3\" / \"f00efa25fc97f2cce2499771d6a5f809.png\"\n", + ",SPRITE_DIR / \"Unicorn Running.sprite3\" / \"fa5fe4596494a43db8c7957d2254aee3.png\"\n", + ",SPRITE_DIR / \"Unicorn.sprite3\" / \"1439d51d9878276362b123c9045af6b5.png\"\n", + ",SPRITE_DIR / \"Wand.sprite3\" / \"c021f0c7e3086a11336421dd864b7812.png\"\n", + ",SPRITE_DIR / \"Wanda.sprite3\" / \"0b008dabac95126132ab4e0c56d25400.png\"\n", + ",SPRITE_DIR / \"Watermelon.sprite3\" / \"1ed1c8b78eae2ee7422074d7f883031d.png\"\n", + ",SPRITE_DIR / \"Watermelon.sprite3\" / \"21d1340478e32a942914a7afd12b9f1a.png\"\n", + ",SPRITE_DIR / \"Watermelon.sprite3\" / \"677738282686d2dcce35d731c3ddc043.png\"\n", + ",SPRITE_DIR / \"Winter Hat.sprite3\" / \"2672323e34d6dc82fda8fc3b057fa5aa.png\"\n", + ",SPRITE_DIR / \"Witch.sprite3\" / \"44cbaf358d2d8e66815e447c25a4b72e.png\"\n", + ",SPRITE_DIR / \"Witch.sprite3\" / \"668c9dc76ba6a07bebabf5aed4623566.png\"\n", + ",SPRITE_DIR / \"Witch.sprite3\" / 
\"a7e48fc790511fbd46b30b1cdcdc98fc.png\"\n", + ",SPRITE_DIR / \"Witch.sprite3\" / \"b10fb75f426397e10c878fda19d92009.png\"\n", + ",SPRITE_DIR / \"Wizard Girl.sprite3\" / \"4be145d338d921b2d9d6dfd10cda4a6c.png\"\n", + ",SPRITE_DIR / \"Wizard Hat.sprite3\" / \"398e447e36465c2521fdb3a6917b0c65.png\"\n", + ",SPRITE_DIR / \"Wizard-toad.sprite3\" / \"4041d5a2d1869e81268b9b92b49013a3.png\"\n", + ",SPRITE_DIR / \"Wizard-toad.sprite3\" / \"ca3bb4d397ecf6cda3edc48340af908b.png\"\n", + ",SPRITE_DIR / \"Wizard.sprite3\" / \"55ba51188af86ca16ef30267e874c1ed.png\"\n", + ",SPRITE_DIR / \"Wizard.sprite3\" / \"91d495085eb4d02a375c42f6318071e7.png\"\n", + ",SPRITE_DIR / \"Wizard.sprite3\" / \"df943c9894ee4b9df8c5893ce30c2a5f.png\"\n", + ",SPRITE_DIR / \"Zebra.sprite3\" / \"0e3bc5073305b7079b5e9a8c7b7d7f9b.png\"\n", + ",SPRITE_DIR / \"Zebra.sprite3\" / \"f3e322a25b9f79801066056de6f33fb1.png\"\n", + ",BACKDROP_DIR / \"Arctic.sb3\" / \"67e0db3305b3c8bac3a363b1c428892e.png\"\n", + ",BACKDROP_DIR / \"Arctic.sb3\" / \"8eb8790be5507fdccf73e7c1570bbbab.png\"\n", + ",BACKDROP_DIR / \"Baseball 1.sb3\" / \"825d9b54682c406215d9d1f98a819449.png\"\n", + ",BACKDROP_DIR / \"Baseball 2.sb3\" / \"7be1f5b3e682813dac1f297e52ff7dca.png\"\n", + ",BACKDROP_DIR / \"Basketball 1.sb3\" / \"ae21eac3d1814aee1d37ae82ea287816.png\"\n", + ",BACKDROP_DIR / \"Basketball 2.sb3\" / \"a5865738283613a2725b2c9dda6d8c78.png\"\n", + ",BACKDROP_DIR / \"Beach Malibu.sb3\" / \"050615fe992a00d6af0e664e497ebf53.png\"\n", + ",BACKDROP_DIR / \"Beach Rio.sb3\" / \"968f0ede6e70e1dbb763d6fd4c5003e0.png\"\n", + ",BACKDROP_DIR / \"Bedroom 1.sb3\" / \"7aa6bbb2ddc4c10f901e1a50aeac1c7e.png\"\n", + ",BACKDROP_DIR / \"Bedroom 2.sb3\" / \"e2f8b0dbd0a65d2ad8bfc21616662a6a.png\"\n", + ",BACKDROP_DIR / \"Bedroom 3.sb3\" / \"8cc0b88d53345b3e337e8f028a32a4e7.png\"\n", + ",BACKDROP_DIR / \"Bench With View.sb3\" / \"962201a2b712a302fb087f8f0dcb2076.png\"\n", + ",BACKDROP_DIR / \"Blue Sky 2.sb3\" / \"8eb8790be5507fdccf73e7c1570bbbab.png\"\n", + ",BACKDROP_DIR / \"Blue Sky.sb3\" / \"e7c147730f19d284bcd7b3f00af19bb6.png\"\n", + ",BACKDROP_DIR / \"Boardwalk.sb3\" / \"de0e54cd11551566f044e7e6bc588b2c.png\"\n", + ",BACKDROP_DIR / \"Canyon.sb3\" / \"c7c0b27b959193a0b570a9639cfe8158.png\"\n", + ",BACKDROP_DIR / \"Castle 1.sb3\" / \"e1914ed7917267f1c2ef2b48004cade9.png\"\n", + ",BACKDROP_DIR / \"Castle 2.sb3\" / \"951765ee7f7370f120c9df20b577c22f.png\"\n", + ",BACKDROP_DIR / \"Castle 3.sb3\" / \"76fa99f67569fcd39b4be74ed38c33f3.png\"\n", + ",BACKDROP_DIR / \"Castle 4.sb3\" / \"4f45f79af8e8dac3d41eb5a06ade61d4.png\"\n", + ",BACKDROP_DIR / \"Chalkboard.sb3\" / \"a8a24b5aa717bbef09dbe31368914427.png\"\n", + ",BACKDROP_DIR / \"Circles.sb3\" / \"c9847be305920807c5597d81576dd0c4.png\"\n", + ",BACKDROP_DIR / \"City With Water.sb3\" / \"1ef98019fc94ea65a1b55d5521285c7a.png\"\n", + ",BACKDROP_DIR / \"Colorful City.sb3\" / \"04d18ddd1b85f0ea30beb14b8da49f60.png\"\n", + ",BACKDROP_DIR / \"Concert.sb3\" / \"c8d90320d2966c08af8cdd1c6a7a93b5.png\"\n", + ",BACKDROP_DIR / \"Desert.sb3\" / \"d98a9526a34890cf4bad11b5409eae2a.png\"\n", + ",BACKDROP_DIR / \"Farm.sb3\" / \"1e8a70bd07f1dcba3383883f3b948266.png\"\n", + ",BACKDROP_DIR / \"Field At Mit.sb3\" / \"5b0a970202b464915915260c03f05455.png\"\n", + ",BACKDROP_DIR / \"Flowers.sb3\" / \"25a6ede51a96d4e55de2ffb81ae96f8c.png\"\n", + ",BACKDROP_DIR / \"Forest.sb3\" / \"92968ac16b2f0c3f7835a6dacd172c7b.png\"\n", + ",BACKDROP_DIR / \"Galaxy.sb3\" / \"5fab1922f254ae9fd150162c3e392bef.png\"\n", + ",BACKDROP_DIR / \"Garden-rock.sb3\" / 
\"4f66053598bea0905e1559ab9d5a6e31.png\"\n", + ",BACKDROP_DIR / \"Greek Theater.sb3\" / \"93d71e8b8a96cc007b8d68f36acd338a.png\"\n", + ",BACKDROP_DIR / \"Hall.sb3\" / \"ea86ca30b346f27ca5faf1254f6a31e3.png\"\n", + ",BACKDROP_DIR / \"Hay Field.sb3\" / \"da102a69d135973e0fc139131dec785a.png\"\n", + ",BACKDROP_DIR / \"Hearts.sb3\" / \"f98526ccb0eec3ac7d6c8f8ab502825e.png\"\n", + ",BACKDROP_DIR / \"Hill.sb3\" / \"2129c842f28d6881f622fdc3497ff2da.png\"\n", + ",BACKDROP_DIR / \"Jungle.sb3\" / \"f4f908da19e2753f3ed679d7b37650ca.png\"\n", + ",BACKDROP_DIR / \"Jurassic.sb3\" / \"64025bdca5db4938f65597e3682fddcf.png\"\n", + ",BACKDROP_DIR / \"Light.sb3\" / \"4b98c07876ed8997c3762e75790507b4.png\"\n", + ",BACKDROP_DIR / \"Metro.sb3\" / \"0b4a15ba028bf205ec051390d6ac4de7.png\"\n", + ",BACKDROP_DIR / \"Moon.sb3\" / \"0b1d2eaf22d62ef88de80ccde5578fba.png\"\n", + ",BACKDROP_DIR / \"Mountain.sb3\" / \"f84989feee2cf462a1c597169777ee3c.png\"\n", + ",BACKDROP_DIR / \"Mural.sb3\" / \"efb625f7e0b199b15f69e116cd053cea.png\"\n", + ",BACKDROP_DIR / \"Nebula.sb3\" / \"9b5cdbd596da1b6149f56b794b6394f4.png\"\n", + ",BACKDROP_DIR / \"Neon Tunnel.sb3\" / \"57d2b13b2f73d3d878c72810c137b0d6.png\"\n", + ",BACKDROP_DIR / \"Night City With Street.sb3\" / \"14443ad7907b6479d7562a12b8ae0efb.png\"\n", + ",BACKDROP_DIR / \"Night City.sb3\" / \"6fdc795ff487204f72740567be5f64f9.png\"\n", + ",BACKDROP_DIR / \"Party.sb3\" / \"108160d0e44d1c340182e31c9dc0758a.png\"\n", + ",BACKDROP_DIR / \"Pathway.sb3\" / \"5d747ec036755a4b129f0d5b978bc61c.png\"\n", + ",BACKDROP_DIR / \"Playground.sb3\" / \"e5f794c8756ca0cead5cb7e7fe354c41.png\"\n", + ",BACKDROP_DIR / \"Playing Field.sb3\" / \"2de108f3098e92f5c5976cf75d38e99d.png\"\n", + ",BACKDROP_DIR / \"Pool.sb3\" / \"6cab934df643d2fc508cfa90c0c4059b.png\"\n", + ",BACKDROP_DIR / \"Rays.sb3\" / \"87e963282db9e020e8c4d075891ea12b.png\"\n", + ",BACKDROP_DIR / \"Refrigerator.sb3\" / \"98f053f9681e872f34fafd783ce72205.png\"\n", + ",BACKDROP_DIR / \"Room 1.sb3\" / \"87ec29ad216c0074c731d581c7f40c39.png\"\n", + ",BACKDROP_DIR / \"Room 1.sb3\" / \"a81668321aa3dcc0fc185d3e36ae76f6.png\"\n", + ",BACKDROP_DIR / \"Room 2.sb3\" / \"05ae3e3bbea890a6e3552ffe8456775e.png\"\n", + ",BACKDROP_DIR / \"Savanna.sb3\" / \"9b020b8c7cb6a9592f7303add9441d8f.png\"\n", + ",BACKDROP_DIR / \"School.sb3\" / \"1dea69ac0f62cf538d368a7bde1372ac.png\"\n", + ",BACKDROP_DIR / \"Slopes.sb3\" / \"63b6a69594a0a87888b56244bfa2ac1b.png\"\n", + ",BACKDROP_DIR / \"Soccer 2.sb3\" / \"b0dc1268cb595aaeef405bce40d1639c.png\"\n", + ",BACKDROP_DIR / \"Soccer.sb3\" / \"04a63154f04b09494354090f7cc2f1b9.png\"\n", + ",BACKDROP_DIR / \"Space City 1.sb3\" / \"20344b0edcc498281e4cb80242a72667.png\"\n", + ",BACKDROP_DIR / \"Space City 2.sb3\" / \"32b2316fd375faa18088f6c57ebb1c8d.png\"\n", + ",BACKDROP_DIR / \"Space.sb3\" / \"84208d9a3718ec3c9fc5a32a792fa1d0.png\"\n", + ",BACKDROP_DIR / \"Spaceship.sb3\" / \"0c450891306fa63ef02aa0fda7fd0ef9.png\"\n", + ",BACKDROP_DIR / \"Spotlight.sb3\" / \"d26bf4c3980163d9106625cc2ea6c50d.png\"\n", + ",BACKDROP_DIR / \"Stars.sb3\" / \"47282ff0f7047c6fab9c94b531abf721.png\"\n", + ",BACKDROP_DIR / \"Stripes.sb3\" / \"a6a21f5c08d586e8daaebde37c97fb6f.png\"\n", + ",BACKDROP_DIR / \"Theater 2.sb3\" / \"061a78ed83495dd0acd6d62e83e1b972.png\"\n", + ",BACKDROP_DIR / \"Theater.sb3\" / \"c2b097bc5cdb6a14ef5485202bc5ee76.png\"\n", + ",BACKDROP_DIR / \"Tree.sb3\" / \"a23fbf972001c94637b568992f8fd7bd.png\"\n", + ",BACKDROP_DIR / \"Underwater 1.sb3\" / \"d3344650f594bcecdf46aa4a9441badd.png\"\n", + ",BACKDROP_DIR / \"Underwater 
2.sb3\" / \"1517c21786d2d0edc2f3037408d850bd.png\"\n", + ",BACKDROP_DIR / \"Urban.sb3\" / \"1679049718869e1f548e1e8823e29c1c.png\"\n", + ",BACKDROP_DIR / \"Wall 1.sb3\" / \"7e5327c68ff6ddabc48dbfe4717a04fe.png\"\n", + ",BACKDROP_DIR / \"Wall 2.sb3\" / \"82d867fcd9f1b5f49e29c2f853d55665.png\"\n", + ",BACKDROP_DIR / \"Water And Rocks.sb3\" / \"0015433a406a53f00b792424b823268c.png\"\n", + ",BACKDROP_DIR / \"Wetland.sb3\" / \"ef9973bcff6d4cbc558e946028ec7d23.png\"\n", + ",BACKDROP_DIR / \"Winter.sb3\" / \"5fa9385a60b904672d0e46e9d768bb32.png\"\n", + ",BACKDROP_DIR / \"Witch House.sb3\" / \"30085b2d27beb5acdbe895d8b3e64b04.png\"\n", + ",BACKDROP_DIR / \"Woods And Bench.sb3\" / \"4fcf7ed0de6c6b6e9b52c511b0650e9c.png\"\n", + ",BACKDROP_DIR / \"Woods.sb3\" / \"f3eb165d6f3fd23370f97079f2e631bf.png\"\n", + ",BACKDROP_DIR / \"Xy-grid-20px.sb3\" / \"4eec0e1db92b8dea3e5bee25105e8f46.png\"\n", + ",BACKDROP_DIR / \"Xy-grid-30px.sb3\" / \"3b8bcabd0ac683b7cb3673208039764b.png\"\n", + ",BACKDROP_DIR / \"Xy-grid.sb3\" / \"9838d02002d05f88dc54d96494fbc202.png\",\n", + " CODE_BLOCKS_DIR / \"script1.jpg\",\n", + " CODE_BLOCKS_DIR / \"script2.jpg\",\n", + " CODE_BLOCKS_DIR / \"script3.jpg\",\n", + " CODE_BLOCKS_DIR / \"script4.jpg\",\n", + " CODE_BLOCKS_DIR / \"script5.jpg\",\n", + " CODE_BLOCKS_DIR / \"script6.jpg\"]\n", + " folder_image_paths = [os.path.normpath(str(p)) for p in folder_image_paths]\n", + " # =========================================\n", + "\n", + " # -----------------------------------------\n", + " # Load reference embeddings from JSON\n", + " # -----------------------------------------\n", + " with open(f\"{BLOCKS_DIR}/embed.json\", \"r\") as f:\n", + " embedding_json = json.load(f)\n", + "\n", + " # =========================================\n", + " # Decode & embed each sprite image\n", + " # =========================================\n", + " # sprite_features = []\n", + " # for b64 in sprite_base64:\n", + " # if \",\" in b64:\n", + " # b64 = b64.split(\",\", 1)[1]\n", + " \n", + " # img_bytes = base64.b64decode(b64)\n", + " # pil_img = Image.open(BytesIO(img_bytes)).convert(\"RGB\")\n", + " # buf = BytesIO()\n", + " # pil_img.save(buf, format=\"PNG\")\n", + " # buf.seek(0)\n", + " # feats = clip_embd.embed_image([buf])[0]\n", + " # sprite_features.append(feats)\n", + "\n", + " # ============================== #\n", + " # EMBED SPRITE IMAGES #\n", + " # ============================== #\n", + " # ensure model is initialized (fast no-op after first call)\n", + " init_dinov2()\n", + "\n", + " # embed the incoming sprite BytesIO images (same data structure you already use)\n", + " sprite_matrix = embed_bytesio_list(sprite_images_bytes, batch_size=8) # shape (N, D)\n", + "\n", + " # load reference embeddings from JSON (they must be numeric lists)\n", + " img_matrix = np.array([img[\"embeddings\"] for img in embedding_json], dtype=np.float32)\n", + "\n", + " # normalize both sides (important — stored embeddings may not be normalized)\n", + " sprite_matrix = l2_normalize_rows(sprite_matrix)\n", + " img_matrix = l2_normalize_rows(img_matrix)\n", + " \n", + " # =========================================\n", + " # Compute similarities & pick best match\n", + " # =========================================\n", + " similarity = np.matmul(sprite_matrix, img_matrix.T)\n", + " most_similar_indices = np.argmax(similarity, axis=1)\n", + "\n", + " # =========================================\n", + " # Copy matched sprite assets + collect data\n", + " # =========================================\n", + " 
project_data = []\n", + " copied_folders = set()\n", + "\n", + " for sprite_idx, matched_idx in enumerate(most_similar_indices):\n", + " matched_image_path = folder_image_paths[matched_idx]\n", + " matched_folder = os.path.dirname(matched_image_path)\n", + "\n", + " # CHANGED: use our new normalized sprite_base_path\n", + " if not matched_folder.startswith(sprite_base_path):\n", + " continue\n", + "\n", + " if matched_folder in copied_folders:\n", + " continue\n", + " copied_folders.add(matched_folder)\n", + " logger.info(f\"Matched sprite: {matched_image_path}\")\n", + "\n", + " sprite_json_path = os.path.join(matched_folder, 'sprite.json')\n", + " if not os.path.exists(sprite_json_path):\n", + " logger.warning(f\"No sprite.json in {matched_folder}\")\n", + " continue\n", + "\n", + " with open(sprite_json_path, 'r') as f:\n", + " sprite_info = json.load(f)\n", + " # copy all non‐matched files\n", + " for fname in os.listdir(matched_folder):\n", + " if fname in (os.path.basename(matched_image_path), 'sprite.json'):\n", + " continue\n", + " shutil.copy2(os.path.join(matched_folder, fname),\n", + " os.path.join(project_folder, fname))\n", + " project_data.append(sprite_info)\n", + "\n", + " # =========================================\n", + " # Copy matched backdrop assets + collect\n", + " # =========================================\n", + " backdrop_data = []\n", + " copied_backdrop_folders = set()\n", + " for backdrop_idx, matched_idx in enumerate(most_similar_indices):\n", + " matched_image_path = folder_image_paths[matched_idx]\n", + " matched_folder = os.path.dirname(matched_image_path)\n", + " matched_filename = os.path.basename(matched_image_path)\n", + " \n", + " # CHANGED: use our new normalized backdrop_base_path\n", + " if not matched_folder.startswith(backdrop_base_path):\n", + " continue\n", + "\n", + " # skip if backdrop folder already processed\n", + " if matched_folder in copied_backdrop_folders:\n", + " continue\n", + " copied_backdrop_folders.add(matched_folder)\n", + "\n", + " logger.info(f\"Matched backdrop: {matched_image_path}\")\n", + "\n", + " # 1) Copy the matched backdrop image itself\n", + " try:\n", + " shutil.copy2(\n", + " matched_image_path,\n", + " os.path.join(project_folder, matched_filename)\n", + " )\n", + " logger.info(f\"✅ Copied matched backdrop image {matched_filename} to {project_folder}\")\n", + " except Exception as e:\n", + " logger.error(f\"❌ Failed to copy matched backdrop {matched_image_path}: {e}\")\n", + " \n", + " # copy non‐matched files\n", + " for fname in os.listdir(matched_folder):\n", + " # if fname in (os.path.basename(matched_image_path), 'project.json'):\n", + " if fname in {matched_filename, 'project.json'}:\n", + " continue\n", + " # shutil.copy2(os.path.join(matched_folder, fname),\n", + " # os.path.join(project_folder, fname))\n", + " src = os.path.join(matched_folder, fname)\n", + " dst = os.path.join(project_folder, fname)\n", + " if os.path.isfile(src):\n", + " try:\n", + " shutil.copy2(src, dst)\n", + " logger.info(f\"Copied additional backdrop asset {fname} to project folder\")\n", + " except Exception as e:\n", + " logger.error(f\"Failed to copy {src}: {e}\")\n", + "\n", + " # append the stage‐target from its project.json\n", + " pj = os.path.join(matched_folder, 'project.json')\n", + " if os.path.exists(pj):\n", + " with open(pj, 'r') as f:\n", + " bd_json = json.load(f)\n", + " for tgt in bd_json.get(\"targets\", []):\n", + " if tgt.get(\"isStage\"):\n", + " backdrop_data.append(tgt)\n", + " else:\n", + " 
logger.warning(f\"No project.json in {matched_folder}\")\n", + "\n", + "\n", + " # =========================================\n", + " # Merge into final Scratch project.json\n", + " # =========================================\n", + " final_project = {\n", + " \"targets\": [], \"monitors\": [], \"extensions\": [],\n", + " \"meta\": {\n", + " \"semver\": \"3.0.0\",\n", + " \"vm\": \"11.3.0\",\n", + " \"agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36\"\n", + " }\n", + " }\n", + " # sprites first\n", + " for spr in project_data:\n", + " if not spr.get(\"isStage\", False):\n", + " final_project[\"targets\"].append(spr)\n", + "\n", + " # then backdrop as the Stage\n", + " if backdrop_data:\n", + " all_costumes, sounds = [], []\n", + " seen_costumes = set()\n", + " for i, bd in enumerate(backdrop_data):\n", + " for costume in bd.get(\"costumes\", []):\n", + " # Create a unique key for the costume\n", + " key = (costume.get(\"name\"), costume.get(\"assetId\"))\n", + " if key not in seen_costumes:\n", + " seen_costumes.add(key)\n", + " all_costumes.append(costume)\n", + " \n", + " if i == 0:\n", + " sounds = bd.get(\"sounds\", [])\n", + " stage_obj={\n", + " \"isStage\": True,\n", + " \"name\": \"Stage\",\n", + " \"objName\": \"Stage\",\n", + " \"variables\": {}, \n", + " \"lists\": {}, \n", + " \"broadcasts\": {},\n", + " \"blocks\": {}, \n", + " \"comments\": {},\n", + " \"currentCostume\": 1 if len(all_costumes) > 1 else 0,\n", + " \"costumes\": all_costumes,\n", + " \"sounds\": sounds,\n", + " \"volume\": 100, \n", + " \"layerOrder\": 0,\n", + " \"tempo\": 60, \n", + " \"videoTransparency\": 50,\n", + " \"videoState\": \"on\",\n", + " \"textToSpeechLanguage\": None\n", + " }\n", + " final_project[\"targets\"].insert(0, stage_obj)\n", + " else:\n", + " logger.warning(\"⚠️ No backdrop matched. 
Using default static backdrop.\")\n",
+    "        default_backdrop_path = BACKDROP_DIR / \"cd21514d0531fdffb22204e0ec5ed84a.svg\"\n",
+    "        default_backdrop_name = \"cd21514d0531fdffb22204e0ec5ed84a.svg\"\n",
+    "\n",
+    "        default_backdrop_sound = BACKDROP_DIR / \"83a9787d4cb6f3b7632b4ddfebf74367.wav\"\n",
+    "        default_backdrop_sound_name = \"83a9787d4cb6f3b7632b4ddfebf74367.wav\"\n",
+    "        try:\n",
+    "            shutil.copy2(default_backdrop_path, os.path.join(project_folder, default_backdrop_name))\n",
+    "            logger.info(f\"✅ Default backdrop copied to project: {default_backdrop_name}\")\n",
+    "\n",
+    "            shutil.copy2(default_backdrop_sound, os.path.join(project_folder, default_backdrop_sound_name))\n",
+    "            logger.info(f\"✅ Default backdrop sound copied to project: {default_backdrop_sound_name}\")\n",
+    "        except Exception as e:\n",
+    "            logger.error(f\"❌ Failed to copy default backdrop: {e}\")\n",
+    "\n",
+    "        stage_obj={\n",
+    "            \"isStage\": True,\n",
+    "            \"name\": \"Stage\",\n",
+    "            \"objName\": \"Stage\",\n",
+    "            \"variables\": {},\n",
+    "            \"lists\": {},\n",
+    "            \"broadcasts\": {},\n",
+    "            \"blocks\": {},\n",
+    "            \"comments\": {},\n",
+    "            \"currentCostume\": 0,\n",
+    "            \"costumes\": [\n",
+    "                {\n",
+    "                    \"assetId\": default_backdrop_name.split(\".\")[0],\n",
+    "                    \"name\": \"defaultBackdrop\",\n",
+    "                    \"md5ext\": default_backdrop_name,\n",
+    "                    \"dataFormat\": \"svg\",\n",
+    "                    \"rotationCenterX\": 240,\n",
+    "                    \"rotationCenterY\": 180\n",
+    "                }\n",
+    "            ],\n",
+    "            \"sounds\": [\n",
+    "                {\n",
+    "                    \"name\": \"pop\",\n",
+    "                    \"assetId\": \"83a9787d4cb6f3b7632b4ddfebf74367\",\n",
+    "                    \"dataFormat\": \"wav\",\n",
+    "                    \"format\": \"\",\n",
+    "                    \"rate\": 48000,\n",
+    "                    \"sampleCount\": 1123,\n",
+    "                    \"md5ext\": \"83a9787d4cb6f3b7632b4ddfebf74367.wav\"\n",
+    "                }\n",
+    "            ],\n",
+    "            \"volume\": 100,\n",
+    "            \"layerOrder\": 0,\n",
+    "            \"tempo\": 60,\n",
+    "            \"videoTransparency\": 50,\n",
+    "            \"videoState\": \"on\",\n",
+    "            \"textToSpeechLanguage\": None\n",
+    "        }\n",
+    "        final_project[\"targets\"].insert(0, stage_obj)\n",
+    "\n",
+    "    with open(project_json_path, 'w') as f:\n",
+    "        json.dump(final_project, f, indent=2)\n",
+    "\n",
+    "    return project_json_path\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 28,
+   "id": "622d3e72",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Tesseract is installed! Version: 5.5.0.20241111\n",
+      "\n",
+      "Testing OCR on D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\langgraph_workflow_main.png:\n",
+      "OCR result:\n",
+      "te\n",
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "import pytesseract\n",
+    "from PIL import Image\n",
+    "import sys\n",
+    " \n",
+    "def check_tesseract():\n",
+    "    try:\n",
+    "        # Get the tesseract version to confirm it's installed\n",
+    "        version = pytesseract.get_tesseract_version()\n",
+    "        print(f\"Tesseract is installed! 
Version: {version}\")\n", + " except Exception as e:\n", + " print(\"Error: Tesseract is not working or not found.\")\n", + " print(e)\n", + " sys.exit(1)\n", + " \n", + "def test_ocr(image_path):\n", + " try:\n", + " img = Image.open(image_path)\n", + " text = pytesseract.image_to_string(img, lang='eng', config='--psm 6')\n", + " print(\"OCR result:\")\n", + " print(text)\n", + " img.show()\n", + " except Exception as e:\n", + " print(\"Error processing image with Tesseract.\")\n", + " print(e)\n", + " \n", + "if __name__ == \"__main__\":\n", + " check_tesseract()\n", + " \n", + " # Test OCR with a sample image (replace 'sample.png' with your image path)\n", + " sample_image = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\langgraph_workflow_main.png\"\n", + " print(f\"\\nTesting OCR on {sample_image}:\")\n", + " test_ocr(sample_image)\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "2cc9af18", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tesseract v5.5.0.20241111\n", + " leptonica-1.85.0\n", + " libgif 5.2.2 : libjpeg 8d (libjpeg-turbo 3.0.4) : libpng 1.6.44 : libtiff 4.7.0 : zlib 1.3.1 : libwebp 1.4.0 : libopenjp2 2.5.2\n", + " Found AVX512BW\n", + " Found AVX512F\n", + " Found AVX512VNNI\n", + " Found AVX2\n", + " Found AVX\n", + " Found FMA\n", + " Found SSE4.1\n", + " Found libarchive 3.7.7 zlib/1.3.1 liblzma/5.6.3 bz2lib/1.0.8 liblz4/1.10.0 libzstd/1.5.6\n", + " Found libcurl/8.11.0 Schannel zlib/1.3.1 brotli/1.1.0 zstd/1.5.6 libidn2/2.3.7 libpsl/0.21.5 libssh2/1.11.0\n", + "\n", + "Available langs: ['eng', 'osd']\n", + "Patched pytesseract.image_to_string — now call partition_pdf(...)\n" + ] + } + ], + "source": [ + "import os, subprocess\n", + "# 1) point to your tesseract install and tessdata\n", + "# Try both forms depending on your installation; the first is usually fine:\n", + "os.environ['TESSDATA_PREFIX'] = r\"C:\\Program Files\\Tesseract-OCR\\tessdata\"\n", + "# If the above doesn't work try:\n", + "# os.environ['TESSDATA_PREFIX'] = r\"C:\\Program Files\\Tesseract-OCR\"\n", + "\n", + "import pytesseract\n", + "pytesseract.pytesseract.tesseract_cmd = r\"C:\\Program Files\\Tesseract-OCR\\tesseract.exe\"\n", + "\n", + "# quick checks\n", + "print(subprocess.check_output([pytesseract.pytesseract.tesseract_cmd, '--version']).decode())\n", + "try:\n", + " print(\"Available langs:\", pytesseract.get_languages(config=''))\n", + "except Exception as e:\n", + " print(\"get_languages() failed:\", e)\n", + "\n", + "# 2) monkeypatch pytesseract.image_to_string to remove stray quotes in config\n", + "_orig_image_to_string = pytesseract.image_to_string\n", + "\n", + "def _patched_image_to_string(image, lang=None, config='', *args, **kwargs):\n", + " # Normalize config: replace -l 'eng' or -l \"eng\" with -l eng and remove extra quotes\n", + " if config and \"-l\" in config:\n", + " # remove single/double quotes around the language value\n", + " # e.g. 
\"-l 'eng'\" -> \"-l eng\"\n", + " config = config.replace(\"-l '\", \"-l \").replace('-l \"', \"-l \")\n", + " config = config.replace(\"'\", \"\").replace('\"', \"\")\n", + " # ensure config still has -l style\n", + " return _orig_image_to_string(image, lang=lang, config=config, *args, **kwargs)\n", + "\n", + "pytesseract.image_to_string = _patched_image_to_string\n", + "\n", + "print(\"Patched pytesseract.image_to_string — now call partition_pdf(...)\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "c1ee42a7", + "metadata": {}, + "outputs": [], + "source": [ + "# Replace this with the actual path to your PDF file\n", + "pdf_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\New folder\\c05f433d840a416580b513835a6bfcf2.pdf\"\n", + " \n", + "# Open and read the PDF file as bytes, then wrap it in a BytesIO stream\n", + "with open(pdf_path, \"rb\") as pdf_file:\n", + " pdf_bytes = pdf_file.read()\n", + " pdf_stream = io.BytesIO(pdf_bytes)\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "id": "832b4228", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "unstructured_inference is not installed. Cannot use the hi_res partitioning strategy. Falling back to partitioning with another strategy.\n", + "Falling back to partitioning with ocr_only.\n" + ] + }, + { + "ename": "RuntimeError", + "evalue": "❌ Error in extract_images_from_pdf: ❌ Failed to extract images from PDF: (1, 'Error opening data file C:\\\\Program Files\\\\Tesseract-OCR\\\\tessdata/\\'eng\\'.traineddata Please make sure the TESSDATA_PREFIX environment variable is set to your \"tessdata\" directory. Failed loading language \\'\\'eng\\'\\' Tesseract couldn\\'t load any languages! Could not initialize tesseract.')", + "output_type": "error", + "traceback": [ + "\u001b[31m---------------------------------------------------------------------------\u001b[39m", + "\u001b[31mTesseractError\u001b[39m Traceback (most recent call last)", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[24]\u001b[39m\u001b[32m, line 38\u001b[39m, in \u001b[36mextract_images_from_pdf\u001b[39m\u001b[34m(pdf_stream)\u001b[39m\n\u001b[32m 37\u001b[39m ocr_lang = \u001b[33m\"\u001b[39m\u001b[33meng\u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m---> \u001b[39m\u001b[32m38\u001b[39m elements = \u001b[43mpartition_pdf\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 39\u001b[39m \u001b[43m \u001b[49m\u001b[38;5;66;43;03m# filename=str(pdf_path), # partition_pdf might expect a string\u001b[39;49;00m\n\u001b[32m 40\u001b[39m \u001b[43m \u001b[49m\u001b[43mfile\u001b[49m\u001b[43m=\u001b[49m\u001b[43mpdf_stream\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;66;43;03m# 'file=', inplace of 'filename'\u001b[39;49;00m\n\u001b[32m 41\u001b[39m \u001b[43m \u001b[49m\u001b[43mstrategy\u001b[49m\u001b[43m=\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mhi_res\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[32m 42\u001b[39m \u001b[43m \u001b[49m\u001b[43mextract_image_block_types\u001b[49m\u001b[43m=\u001b[49m\u001b[43m[\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mImage\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 43\u001b[39m \u001b[43m \u001b[49m\u001b[43mhi_res_model_name\u001b[49m\u001b[43m=\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43myolox\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[32m 44\u001b[39m \u001b[43m 
\u001b[49m\u001b[43mextract_image_block_to_payload\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[32m 45\u001b[39m \u001b[43m \u001b[49m\u001b[43mocr_languages\u001b[49m\u001b[43m=\u001b[49m\u001b[43mocr_lang\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 46\u001b[39m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 47\u001b[39m \u001b[38;5;28mprint\u001b[39m(\u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[33mELEMENTS\u001b[39m\u001b[33m\"\u001b[39m)\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\unstructured\\documents\\elements.py:237\u001b[39m, in \u001b[36mprocess_metadata..decorator..wrapper\u001b[39m\u001b[34m(*args, **kwargs)\u001b[39m\n\u001b[32m 235\u001b[39m \u001b[38;5;129m@wraps\u001b[39m(func)\n\u001b[32m 236\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34mwrapper\u001b[39m(*args, **kwargs):\n\u001b[32m--> \u001b[39m\u001b[32m237\u001b[39m elements = \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 238\u001b[39m sig = inspect.signature(func)\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\unstructured\\file_utils\\filetype.py:630\u001b[39m, in \u001b[36madd_metadata_with_filetype..decorator..wrapper\u001b[39m\u001b[34m(*args, **kwargs)\u001b[39m\n\u001b[32m 628\u001b[39m \u001b[38;5;129m@wraps\u001b[39m(func)\n\u001b[32m 629\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34mwrapper\u001b[39m(*args, **kwargs):\n\u001b[32m--> \u001b[39m\u001b[32m630\u001b[39m elements = \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 631\u001b[39m sig = inspect.signature(func)\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\unstructured\\partition\\pdf.py:95\u001b[39m, in \u001b[36mpartition_pdf\u001b[39m\u001b[34m(filename, file, include_page_breaks, strategy, infer_table_structure, ocr_languages, max_partition, min_partition, include_metadata, metadata_filename, metadata_last_modified, **kwargs)\u001b[39m\n\u001b[32m 94\u001b[39m exactly_one(filename=filename, file=file)\n\u001b[32m---> \u001b[39m\u001b[32m95\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mpartition_pdf_or_image\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 96\u001b[39m \u001b[43m \u001b[49m\u001b[43mfilename\u001b[49m\u001b[43m=\u001b[49m\u001b[43mfilename\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 97\u001b[39m \u001b[43m \u001b[49m\u001b[43mfile\u001b[49m\u001b[43m=\u001b[49m\u001b[43mfile\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 98\u001b[39m \u001b[43m \u001b[49m\u001b[43minclude_page_breaks\u001b[49m\u001b[43m=\u001b[49m\u001b[43minclude_page_breaks\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 99\u001b[39m \u001b[43m \u001b[49m\u001b[43mstrategy\u001b[49m\u001b[43m=\u001b[49m\u001b[43mstrategy\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 100\u001b[39m \u001b[43m \u001b[49m\u001b[43minfer_table_structure\u001b[49m\u001b[43m=\u001b[49m\u001b[43minfer_table_structure\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 101\u001b[39m \u001b[43m 
\u001b[49m\u001b[43mocr_languages\u001b[49m\u001b[43m=\u001b[49m\u001b[43mocr_languages\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 102\u001b[39m \u001b[43m \u001b[49m\u001b[43mmax_partition\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmax_partition\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 103\u001b[39m \u001b[43m \u001b[49m\u001b[43mmin_partition\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmin_partition\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 104\u001b[39m \u001b[43m \u001b[49m\u001b[43mmetadata_last_modified\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmetadata_last_modified\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 105\u001b[39m \u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 106\u001b[39m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\unstructured\\partition\\pdf.py:201\u001b[39m, in \u001b[36mpartition_pdf_or_image\u001b[39m\u001b[34m(filename, file, is_image, include_page_breaks, strategy, infer_table_structure, ocr_languages, max_partition, min_partition, metadata_last_modified, **kwargs)\u001b[39m\n\u001b[32m 200\u001b[39m \u001b[38;5;28;01mwith\u001b[39;00m warnings.catch_warnings():\n\u001b[32m--> \u001b[39m\u001b[32m201\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_partition_pdf_or_image_with_ocr\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 202\u001b[39m \u001b[43m \u001b[49m\u001b[43mfilename\u001b[49m\u001b[43m=\u001b[49m\u001b[43mfilename\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 203\u001b[39m \u001b[43m \u001b[49m\u001b[43mfile\u001b[49m\u001b[43m=\u001b[49m\u001b[43mfile\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 204\u001b[39m \u001b[43m \u001b[49m\u001b[43minclude_page_breaks\u001b[49m\u001b[43m=\u001b[49m\u001b[43minclude_page_breaks\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 205\u001b[39m \u001b[43m \u001b[49m\u001b[43mocr_languages\u001b[49m\u001b[43m=\u001b[49m\u001b[43mocr_languages\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 206\u001b[39m \u001b[43m \u001b[49m\u001b[43mis_image\u001b[49m\u001b[43m=\u001b[49m\u001b[43mis_image\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 207\u001b[39m \u001b[43m \u001b[49m\u001b[43mmax_partition\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmax_partition\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 208\u001b[39m \u001b[43m \u001b[49m\u001b[43mmin_partition\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmin_partition\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 209\u001b[39m \u001b[43m \u001b[49m\u001b[43mmetadata_last_modified\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmetadata_last_modified\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mlast_modification_date\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 210\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 211\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m layout_elements\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\unstructured\\utils.py:43\u001b[39m, in \u001b[36mrequires_dependencies..decorator..wrapper\u001b[39m\u001b[34m(*args, **kwargs)\u001b[39m\n\u001b[32m 35\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mImportError\u001b[39;00m(\n\u001b[32m 36\u001b[39m \u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[33mFollowing dependencies are missing: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[33m'\u001b[39m\u001b[33m, 
\u001b[39m\u001b[33m'\u001b[39m.join(missing_deps)\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m. \u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m 37\u001b[39m + (\n\u001b[32m (...)\u001b[39m\u001b[32m 41\u001b[39m ),\n\u001b[32m 42\u001b[39m )\n\u001b[32m---> \u001b[39m\u001b[32m43\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\unstructured\\partition\\pdf.py:479\u001b[39m, in \u001b[36m_partition_pdf_or_image_with_ocr\u001b[39m\u001b[34m(filename, file, include_page_breaks, ocr_languages, is_image, max_partition, min_partition, metadata_last_modified)\u001b[39m\n\u001b[32m 474\u001b[39m metadata = ElementMetadata(\n\u001b[32m 475\u001b[39m filename=filename,\n\u001b[32m 476\u001b[39m page_number=page_number,\n\u001b[32m 477\u001b[39m last_modified=metadata_last_modified,\n\u001b[32m 478\u001b[39m )\n\u001b[32m--> \u001b[39m\u001b[32m479\u001b[39m text = \u001b[43mpytesseract\u001b[49m\u001b[43m.\u001b[49m\u001b[43mimage_to_string\u001b[49m\u001b[43m(\u001b[49m\u001b[43mimage\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m=\u001b[49m\u001b[33;43mf\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43m-l \u001b[39;49m\u001b[33;43m'\u001b[39;49m\u001b[38;5;132;43;01m{\u001b[39;49;00m\u001b[43mocr_languages\u001b[49m\u001b[38;5;132;43;01m}\u001b[39;49;00m\u001b[33;43m'\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[32m 481\u001b[39m _elements = partition_text(\n\u001b[32m 482\u001b[39m text=text,\n\u001b[32m 483\u001b[39m max_partition=max_partition,\n\u001b[32m 484\u001b[39m min_partition=min_partition,\n\u001b[32m 485\u001b[39m )\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\pytesseract\\pytesseract.py:486\u001b[39m, in \u001b[36mimage_to_string\u001b[39m\u001b[34m(image, lang, config, nice, output_type, timeout)\u001b[39m\n\u001b[32m 484\u001b[39m args = [image, \u001b[33m'\u001b[39m\u001b[33mtxt\u001b[39m\u001b[33m'\u001b[39m, lang, config, nice, timeout]\n\u001b[32m--> \u001b[39m\u001b[32m486\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m{\u001b[49m\n\u001b[32m 487\u001b[39m \u001b[43m \u001b[49m\u001b[43mOutput\u001b[49m\u001b[43m.\u001b[49m\u001b[43mBYTES\u001b[49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mlambda\u001b[39;49;00m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mrun_and_get_output\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43m(\u001b[49m\u001b[43margs\u001b[49m\u001b[43m \u001b[49m\u001b[43m+\u001b[49m\u001b[43m \u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 488\u001b[39m \u001b[43m \u001b[49m\u001b[43mOutput\u001b[49m\u001b[43m.\u001b[49m\u001b[43mDICT\u001b[49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mlambda\u001b[39;49;00m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43m{\u001b[49m\u001b[33;43m'\u001b[39;49m\u001b[33;43mtext\u001b[39;49m\u001b[33;43m'\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mrun_and_get_output\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43margs\u001b[49m\u001b[43m)\u001b[49m\u001b[43m}\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 489\u001b[39m \u001b[43m \u001b[49m\u001b[43mOutput\u001b[49m\u001b[43m.\u001b[49m\u001b[43mSTRING\u001b[49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mlambda\u001b[39;49;00m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mrun_and_get_output\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43margs\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 490\u001b[39m \u001b[43m\u001b[49m\u001b[43m}\u001b[49m\u001b[43m[\u001b[49m\u001b[43moutput_type\u001b[49m\u001b[43m]\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\pytesseract\\pytesseract.py:489\u001b[39m, in \u001b[36mimage_to_string..\u001b[39m\u001b[34m()\u001b[39m\n\u001b[32m 484\u001b[39m args = [image, \u001b[33m'\u001b[39m\u001b[33mtxt\u001b[39m\u001b[33m'\u001b[39m, lang, config, nice, timeout]\n\u001b[32m 486\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m {\n\u001b[32m 487\u001b[39m Output.BYTES: \u001b[38;5;28;01mlambda\u001b[39;00m: run_and_get_output(*(args + [\u001b[38;5;28;01mTrue\u001b[39;00m])),\n\u001b[32m 488\u001b[39m Output.DICT: \u001b[38;5;28;01mlambda\u001b[39;00m: {\u001b[33m'\u001b[39m\u001b[33mtext\u001b[39m\u001b[33m'\u001b[39m: run_and_get_output(*args)},\n\u001b[32m--> \u001b[39m\u001b[32m489\u001b[39m Output.STRING: \u001b[38;5;28;01mlambda\u001b[39;00m: \u001b[43mrun_and_get_output\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43margs\u001b[49m\u001b[43m)\u001b[49m,\n\u001b[32m 490\u001b[39m }[output_type]()\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\pytesseract\\pytesseract.py:352\u001b[39m, in \u001b[36mrun_and_get_output\u001b[39m\u001b[34m(image, extension, lang, config, nice, timeout, return_bytes)\u001b[39m\n\u001b[32m 342\u001b[39m kwargs = {\n\u001b[32m 343\u001b[39m \u001b[33m'\u001b[39m\u001b[33minput_filename\u001b[39m\u001b[33m'\u001b[39m: input_filename,\n\u001b[32m 344\u001b[39m \u001b[33m'\u001b[39m\u001b[33moutput_filename_base\u001b[39m\u001b[33m'\u001b[39m: temp_name,\n\u001b[32m (...)\u001b[39m\u001b[32m 349\u001b[39m \u001b[33m'\u001b[39m\u001b[33mtimeout\u001b[39m\u001b[33m'\u001b[39m: timeout,\n\u001b[32m 350\u001b[39m }\n\u001b[32m--> \u001b[39m\u001b[32m352\u001b[39m \u001b[43mrun_tesseract\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 353\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m _read_output(\n\u001b[32m 354\u001b[39m \u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mkwargs[\u001b[33m'\u001b[39m\u001b[33moutput_filename_base\u001b[39m\u001b[33m'\u001b[39m]\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;132;01m{\u001b[39;00mextsep\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;132;01m{\u001b[39;00mextension\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m\"\u001b[39m,\n\u001b[32m 355\u001b[39m return_bytes,\n\u001b[32m 356\u001b[39m )\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\pytesseract\\pytesseract.py:284\u001b[39m, in \u001b[36mrun_tesseract\u001b[39m\u001b[34m(input_filename, output_filename_base, extension, lang, config, nice, timeout)\u001b[39m\n\u001b[32m 283\u001b[39m 
\u001b[38;5;28;01mif\u001b[39;00m proc.returncode:\n\u001b[32m--> \u001b[39m\u001b[32m284\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m TesseractError(proc.returncode, get_errors(error_string))\n", + "\u001b[31mTesseractError\u001b[39m: (1, 'Error opening data file C:\\\\Program Files\\\\Tesseract-OCR\\\\tessdata/\\'eng\\'.traineddata Please make sure the TESSDATA_PREFIX environment variable is set to your \"tessdata\" directory. Failed loading language \\'\\'eng\\'\\' Tesseract couldn\\'t load any languages! Could not initialize tesseract.')", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[31mRuntimeError\u001b[39m Traceback (most recent call last)", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[24]\u001b[39m\u001b[32m, line 49\u001b[39m, in \u001b[36mextract_images_from_pdf\u001b[39m\u001b[34m(pdf_stream)\u001b[39m\n\u001b[32m 48\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[32m---> \u001b[39m\u001b[32m49\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[32m 50\u001b[39m \u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[33m❌ Failed to extract images from PDF: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mstr\u001b[39m(e)\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m\"\u001b[39m)\n\u001b[32m 52\u001b[39m file_elements = [element.to_dict() \u001b[38;5;28;01mfor\u001b[39;00m element \u001b[38;5;129;01min\u001b[39;00m elements]\n", + "\u001b[31mRuntimeError\u001b[39m: ❌ Failed to extract images from PDF: (1, 'Error opening data file C:\\\\Program Files\\\\Tesseract-OCR\\\\tessdata/\\'eng\\'.traineddata Please make sure the TESSDATA_PREFIX environment variable is set to your \"tessdata\" directory. Failed loading language \\'\\'eng\\'\\' Tesseract couldn\\'t load any languages! Could not initialize tesseract.')", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[31mRuntimeError\u001b[39m Traceback (most recent call last)", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[28]\u001b[39m\u001b[32m, line 1\u001b[39m\n\u001b[32m----> \u001b[39m\u001b[32m1\u001b[39m output_path = \u001b[43mextract_images_from_pdf\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpdf_stream\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[24]\u001b[39m\u001b[32m, line 86\u001b[39m, in \u001b[36mextract_images_from_pdf\u001b[39m\u001b[34m(pdf_stream)\u001b[39m\n\u001b[32m 84\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m manipulated_json\n\u001b[32m 85\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[32m---> \u001b[39m\u001b[32m86\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[33m❌ Error in extract_images_from_pdf: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mstr\u001b[39m(e)\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m\"\u001b[39m)\n", + "\u001b[31mRuntimeError\u001b[39m: ❌ Error in extract_images_from_pdf: ❌ Failed to extract images from PDF: (1, 'Error opening data file C:\\\\Program Files\\\\Tesseract-OCR\\\\tessdata/\\'eng\\'.traineddata Please make sure the TESSDATA_PREFIX environment variable is set to your \"tessdata\" directory. Failed loading language \\'\\'eng\\'\\' Tesseract couldn\\'t load any languages! 
Could not initialize tesseract.')" + ] + } + ], + "source": [ + "output_path = extract_images_from_pdf(pdf_stream)" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "e09eb650", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{}" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "output_path" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2ecc33d8", + "metadata": {}, + "outputs": [], + "source": [ + "import pandas as pd" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9262c4f8", + "metadata": {}, + "outputs": [], + "source": [ + "project_folder= r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\test_files\\small\"\n", + "project_output = similarity_matching(output_path, project_folder)" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "3c409dc9", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tesseract v5.5.0.20241111\n", + " leptonica-1.85.0\n", + " libgif 5.2.2 : libjpeg 8d (libjpeg-turbo 3.0.4) : libpng 1.6.44 : libtiff 4.7.0 : zlib 1.3.1 : libwebp 1.4.0 : libopenjp2 2.5.2\n", + " Found AVX512BW\n", + " Found AVX512F\n", + " Found AVX512VNNI\n", + " Found AVX2\n", + " Found AVX\n", + " Found FMA\n", + " Found SSE4.1\n", + " Found libarchive 3.7.7 zlib/1.3.1 liblzma/5.6.3 bz2lib/1.0.8 liblz4/1.10.0 libzstd/1.5.6\n", + " Found libcurl/8.11.0 Schannel zlib/1.3.1 brotli/1.1.0 zstd/1.5.6 libidn2/2.3.7 libpsl/0.21.5 libssh2/1.11.0\n", + "\n", + "['eng', 'osd']\n" + ] + } + ], + "source": [ + "import pytesseract\n", + "pytesseract.pytesseract.tesseract_cmd = r\"C:\\Program Files\\Tesseract-OCR\\tesseract.exe\"\n", + "import subprocess\n", + "print(subprocess.check_output([pytesseract.pytesseract.tesseract_cmd, '--version']).decode())\n", + "# get languages\n", + "print(pytesseract.get_languages(config=''))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4a9bf3cb", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "id": "96a67ef5", + "metadata": {}, + "source": [ + "## dino v2 with optimization flow" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "a48fa4a2", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loading DINOv2 model 'facebook/dinov2-small'...\n", + "DINOv2 model loaded successfully.\n", + "Found 54 images. Generating embeddings...\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\Backdrops\\Baseball 2.sb3\\7be1f5b3e682813dac1f297e52ff7dca.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\Backdrops\\Beach Malibu.sb3\\050615fe992a00d6af0e664e497ebf53.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\Backdrops\\Bedroom 3.sb3\\8cc0b88d53345b3e337e8f028a32a4e7.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\Backdrops\\Blue Sky.sb3\\e7c147730f19d284bcd7b3f00af19bb6.png: The truth value of an array with more than one element is ambiguous. 
Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\Backdrops\\Castle 2.sb3\\951765ee7f7370f120c9df20b577c22f.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\Backdrops\\Colorful City.sb3\\04d18ddd1b85f0ea30beb14b8da49f60.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\Backdrops\\Hall.sb3\\ea86ca30b346f27ca5faf1254f6a31e3.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\Backdrops\\Jungle.sb3\\f4f908da19e2753f3ed679d7b37650ca.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\Backdrops\\Soccer.sb3\\04a63154f04b09494354090f7cc2f1b9.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\Backdrops\\Theater.sb3\\c2b097bc5cdb6a14ef5485202bc5ee76.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Batter.sprite3\\592ee9ab2aeefe65cb4fb95fcd046f33.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Batter.sprite3\\9d193bef6e3d6d8eba6d1470b8bf9351.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Batter.sprite3\\baseball_sprite_motion_1.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Batter.sprite3\\bd4fc003528acfa847e45ff82f346eee.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Batter.sprite3\\fdfde4bcbaca0f68e83fdf3f4ef0c660.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Bear.sprite3\\6f303e972f33fcb7ef36d0d8012d0975.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Bear.sprite3\\bear_motion_2.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Bear.sprite3\\deef1eaa96d550ae6fc11524a1935024.png: The truth value of an array with more than one element is ambiguous. 
Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Beetle.sprite3\\46d0dfd4ae7e9bfe3a6a2e35a4905eae.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Butterfly 1.sprite3\\34b76c1835c6a7fc2c47956e49bb0f52.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Butterfly 1.sprite3\\49c9f952007d870a046cff93b6e5e098.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Butterfly 1.sprite3\\fe98df7367e314d9640bfaa54fc239be.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Cat.sprite3\\0fb9be3e8397c983338cb71dc84d0b25.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Cat.sprite3\\bcf454acf82e4504149f7ffe07081dbc.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Centaur.sprite3\\2373556e776cad3ba4d6ee04fc34550b.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Centaur.sprite3\\c00ffa6c5dd0baf9f456b897ff974377.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Centaur.sprite3\\d722329bd9373ad80625e5be6d52f3ed.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Centaur.sprite3\\d7aa990538915b7ef1f496d7e8486ade.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\City Bus.sprite3\\7d7e26014a346b894db8ab1819f2167f.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\City Bus.sprite3\\e9694adbff9422363e2ea03166015393.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Crab.sprite3\\49839aa1b0feed02a3c759db5f8dee71.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Crab.sprite3\\bear_element.png: The truth value of an array with more than one element is ambiguous. 
Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Crab.sprite3\\f7cdd2acbc6d7559d33be8675059c79e.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Glow-G.sprite3\\56839bc48957869d980c6f9b6f5a2a91.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Jordyn.sprite3\\00c8c464c19460df693f8d5ae69afdab.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Jordyn.sprite3\\768c4601174f0dfcb96b3080ccc3a192.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Jordyn.sprite3\\a7cc1e5f02b58ecc8095cfc18eef0289.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Jordyn.sprite3\\db4d97cbf24e2b8af665bfbf06f67fa0.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Soccer Ball.sprite3\\5d973d7a3a8be3f3bd6e1cd0f73c32b5.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Soccer Ball.sprite3\\cat_football.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Star.sprite3\\551629f2a64c1f3703e57aaa133effa6.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Wizard.sprite3\\55ba51188af86ca16ef30267e874c1ed.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Wizard.sprite3\\91d495085eb4d02a375c42f6318071e7.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\\Wizard.sprite3\\df943c9894ee4b9df8c5893ce30c2a5f.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\code_blocks\\script1.JPG: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\code_blocks\\script2.JPG: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\code_blocks\\script3.JPG: The truth value of an array with more than one element is ambiguous. 
Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\code_blocks\\script4.JPG: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\code_blocks\\script5.JPG: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\code_blocks\\script6.JPG: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\code_blocks\\script7.JPG: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\code_blocks\\script8.JPG: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\code_blocks\\script9.JPG: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Error processing image D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\code_blocks\\static_white.png: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()\n", + "Embeddings and hashes generated.\n", + "Hybrid embeddings and hashes saved to D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\hybrid_embeddings.json and D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\phash_data.json\n", + "Embedding creation and storage process complete.\n" + ] + } + ], + "source": [ + "import os\n", + "import json\n", + "import numpy as np\n", + "import torch\n", + "from PIL import Image, ImageOps, ImageEnhance\n", + "from imagededup.methods import PHash\n", + "from transformers import AutoImageProcessor, AutoModel\n", + "\n", + "# ============================== #\n", + "# DEFINE PATHS #\n", + "# ============================== #\n", + "backdrop_images_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\Backdrops\"\n", + "sprite_images_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\sprites\"\n", + "code_blocks_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\\code_blocks\"\n", + "output_dir = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\samp_code\"\n", + "\n", + "image_dirs = [backdrop_images_path, sprite_images_path, code_blocks_path]\n", + "\n", + "# ======================================= #\n", + "# INITIALIZE DINOv2 EMBEDDER #\n", + "# ======================================= #\n", + "print(\"Loading DINOv2 model 'facebook/dinov2-small'...\")\n", + "DINOV2_MODEL = \"facebook/dinov2-small\"\n", + "DEVICE = torch.device(\"cpu\")\n", + "torch.set_num_threads(4) # tune for your CPU\n", + "\n", + "dinov2_processor = AutoImageProcessor.from_pretrained(DINOV2_MODEL)\n", + "dinov2_model = AutoModel.from_pretrained(DINOV2_MODEL)\n", + "dinov2_model.to(DEVICE)\n", + "print(\"DINOv2 model loaded successfully.\")\n", + "\n", + "# ========================================= #\n", + "# IMAGE PREPROCESSING + HASHING SETUP #\n", + "# ========================================= #\n", + "phash = PHash()\n", + "\n", + "def preprocess_image(image_path):\n", + " \"\"\"\n", + " Preprocess an image to reduce noise and improve matching 
accuracy.\n",
+    "    Handles images with an alpha channel (transparency) by converting them to RGB.\n",
+    "    Returns a numpy array for compatibility with imagededup.\n",
+    "    \"\"\"\n",
+    "    try:\n",
+    "        img = Image.open(image_path)\n",
+    "\n",
+    "        # Check for and handle alpha channel (transparency)\n",
+    "        if img.mode == 'RGBA':\n",
+    "            img = img.convert('RGB')\n",
+    "\n",
+    "        # Convert to grayscale\n",
+    "        img = ImageOps.grayscale(img)\n",
+    "\n",
+    "        # Apply histogram equalization\n",
+    "        img = ImageOps.equalize(img)\n",
+    "\n",
+    "        # Resize to a standard size for hashing\n",
+    "        img = img.resize((256, 256))\n",
+    "\n",
+    "        # Convert to numpy array for imagededup compatibility\n",
+    "        img_array = np.array(img)\n",
+    "        return img_array\n",
+    "    except Exception as e:\n",
+    "        print(f\"Error preprocessing image {image_path}: {e}\")\n",
+    "        return None\n",
+    "\n",
+    "def get_dinov2_embedding(image_path):\n",
+    "    \"\"\"\n",
+    "    Loads an image from a path, preprocesses it, and returns the DINOv2 embedding.\n",
+    "    \"\"\"\n",
+    "    try:\n",
+    "        # Preprocess Image; the result is a NumPy array (or None on failure), so it\n",
+    "        # must be compared against None rather than used in a bare truth test.\n",
+    "        preprocessed_img = preprocess_image(image_path)\n",
+    "        if preprocessed_img is None:\n",
+    "            return None\n",
+    "\n",
+    "        # Process the final image with DINOv2\n",
+    "        inputs = dinov2_processor(images=preprocessed_img, return_tensors=\"pt\").to(DEVICE)\n",
+    "        with torch.no_grad():\n",
+    "            outputs = dinov2_model(**inputs)\n",
+    "\n",
+    "        # We use the CLS token embedding from the last layer.\n",
+    "        embedding = outputs.last_hidden_state[:, 0, :].squeeze(0).cpu().numpy()\n",
+    "        return embedding\n",
+    "    except Exception as e:\n",
+    "        print(f\"Error processing image {image_path}: {e}\")\n",
+    "        return None\n",
+    "\n",
+    "# ========================================= #\n",
+    "#  Walk folders to collect all image paths  #\n",
+    "# ========================================= #\n",
+    "folder_image_paths = []\n",
+    "for image_dir in image_dirs:\n",
+    "    for root, _, files in os.walk(image_dir):\n",
+    "        for fname in files:\n",
+    "            if fname.lower().endswith((\".png\", \".jpg\", \".jpeg\")):\n",
+    "                folder_image_paths.append(os.path.join(root, fname))\n",
+    "\n",
+    "if not folder_image_paths:\n",
+    "    print(\"No images found in the specified directories. Exiting.\")\n",
+    "    exit()\n",
+    "\n",
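+    "# --- optional spot check (added sketch; index 0 is an arbitrary choice) ---\n",
+    "# Embeds and hashes a single collected image first, so the DINOv2 + pHash\n",
+    "# pipeline can be verified before looping over every file.\n",
+    "sample_path = folder_image_paths[0]\n",
+    "sample_emb = get_dinov2_embedding(sample_path)\n",
+    "sample_arr = preprocess_image(sample_path)\n",
+    "if sample_emb is not None and sample_arr is not None:\n",
+    "    print(f\"Spot check {os.path.basename(sample_path)}: embedding dim {sample_emb.shape[0]}, phash {phash.encode_image(image_array=sample_arr)}\")\n",
+    "\n",
+    "print(f\"Found {len(folder_image_paths)} images. 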
Generating embeddings...\")\n", + "\n", + "# ================================== #\n", + "# EMBED AND STORE IN A DICT #\n", + "# ================================== #\n", + "embeddings_dict = {}\n", + "hash_dict = {}\n", + "for path in folder_image_paths:\n", + " embedding = get_dinov2_embedding(path)\n", + " if embedding is not None:\n", + " norm_embedding = embedding / np.linalg.norm(embedding)\n", + " embeddings_dict[path] = norm_embedding.tolist()\n", + "\n", + " # Preprocess the image and generate Perceptual Hash (PHash)\n", + " preprocessed_array = preprocess_image(path)\n", + " if preprocessed_array is not None:\n", + " hash_dict[path] = phash.encode_image(image_array=preprocessed_array)\n", + " else:\n", + " print(f\"Skipping hashing for {path} due to preprocessing failure.\")\n", + "\n", + "print(\"Embeddings and hashes generated.\")\n", + "\n", + "# ============================== #\n", + "# SAVE EMBEDDINGS TO JSON # \n", + "# ============================== #\n", + "embeddings_json_path = os.path.join(output_dir, \"hybrid_embeddings.json\")\n", + "hash_json_path = os.path.join(output_dir, \"phash_data.json\")\n", + "\n", + "with open(embeddings_json_path, \"w\") as f:\n", + " json.dump(embeddings_dict, f, indent=2)\n", + "\n", + "with open(hash_json_path, \"w\") as f:\n", + " json.dump(hash_dict, f, indent=2)\n", + "\n", + "print(f\"Hybrid embeddings and hashes saved to {embeddings_json_path} and {hash_json_path}\")\n", + "print(\"Embedding creation and storage process complete.\")\n", + "\n", + "# ================================================================= #\n", + "# PART 2: SIMILARITY SEARCH FROM THE JSON FILE #\n", + "# ================================================================= #\n", + "\n", + "def cosine_similarity(vec1, vec2):\n", + " \"\"\"\n", + " Calculates the cosine similarity between two numpy vectors.\n", + " Assumes vectors are already normalized.\n", + " \"\"\"\n", + " return np.dot(vec1, vec2)\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1fa78c89", + "metadata": {}, + "outputs": [], + "source": [ + "# ============================== #\n", + "# LOAD EMBEDDINGS FROM JSON #\n", + "# ============================== #\n", + "print(\"\\n--- Starting Search Process ---\")\n", + "print(\"Loading hybrid embeddings and hash data from JSON...\")\n", + "with open(embeddings_json_path, \"r\") as f:\n", + " embeddings_dict = json.load(f)\n", + "\n", + "with open(hash_json_path, \"r\") as f:\n", + " hash_dict = json.load(f)\n", + "\n", + "image_paths = list(embeddings_dict.keys())\n", + "image_embeddings = np.array(list(embeddings_dict.values()))\n", + "\n", + "# =================================== #\n", + "# DEFINE QUERY (IMAGE) #\n", + "# =================================== #\n", + "query = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\test_samp\\image (3).png\"\n", + "print(f\"Encoding query image: '{query}'...\")\n", + "\n", + "# =================================== #\n", + "# ENCODE QUERY TO EMBEDDING #\n", + "# =================================== #\n", + "query_embedding = get_dinov2_embedding(query)\n", + "if query_embedding is None:\n", + " print(\"Could not create an embedding for the query. 
Exiting.\")\n",
+    "    exit()\n",
+    "\n",
+    "query_embedding = query_embedding / np.linalg.norm(query_embedding)\n",
+    "query_hash = phash.encode_image(image_array=preprocess_image(query))\n",
+    "\n",
+    "# ============================== #\n",
+    "#     PERFORM HYBRID SEARCH      #\n",
+    "# ============================== #\n",
+    "k = 5  # Number of similar results to retrieve\n",
+    "print(f\"Searching for the top {k} similar images...\")\n",
+    "\n",
+    "similarities = []\n",
+    "hashing_similarities = []\n",
+    "for i, stored_embedding in enumerate(image_embeddings):\n",
+    "    similarity = cosine_similarity(query_embedding, stored_embedding)\n",
+    "    similarities.append((similarity, image_paths[i]))\n",
+    "\n",
+    "    # Hash-based Hamming distance (lower is better)\n",
+    "    stored_hash = hash_dict[image_paths[i]]\n",
+    "    hamming_distance = phash.hamming_distance(query_hash, stored_hash)\n",
+    "    hashing_similarities.append((hamming_distance, image_paths[i]))\n",
+    "\n",
+    "# Sort embeddings-based similarities in descending order\n",
+    "similarities.sort(key=lambda x: x[0], reverse=True)\n",
+    "\n",
+    "# Sort hashing similarities in ascending order (lower Hamming distance is better)\n",
+    "hashing_similarities.sort(key=lambda x: x[0])\n",
+    "\n",
+    "# ============================== #\n",
+    "#        DISPLAY RESULTS         #\n",
+    "# ============================== #\n",
+    "print(\"\\n--- Hybrid Search Results ---\")\n",
+    "print(\"\\nTop results by DINOv2 Embeddings:\")\n",
+    "for i in range(min(k, len(similarities))):\n",
+    "    similarity, path = similarities[i]\n",
+    "    print(f\"Rank {i+1}:\")\n",
+    "    print(f\"  Path: {path}\")\n",
+    "    print(f\"  Similarity Score: {similarity:.4f}\")\n",
+    "    print(\"-\" * 20)\n",
+    "\n",
+    "print(\"\\nTop results by PHash Hamming Distance:\")\n",
+    "for i in range(min(k, len(hashing_similarities))):\n",
+    "    hamming_distance, path = hashing_similarities[i]\n",
+    "    print(f\"Rank {i+1}:\")\n",
+    "    print(f\"  Path: {path}\")\n",
+    "    print(f\"  Hamming Distance: {hamming_distance}\")\n",
+    "    print(\"-\" * 20)\n",
+    "\n",
+    "print(\"Hybrid search complete.\")"
+   ]
+  },
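+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a3f9c0de",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# --- optional sketch: fuse the two rankings above into one hybrid score ---\n",
+    "# The previous cell reports DINOv2 cosine similarity and pHash Hamming distance\n",
+    "# as separate lists. A simple fusion rescales the Hamming distance to [0, 1]\n",
+    "# and takes a weighted sum; the 64-bit hash length and the 0.7/0.3 weights are\n",
+    "# assumptions, not tuned values.\n",
+    "HASH_BITS = 64\n",
+    "alpha = 0.7\n",
+    "emb_by_path = {path: score for score, path in similarities}\n",
+    "ham_by_path = {path: dist for dist, path in hashing_similarities}\n",
+    "hybrid = [\n",
+    "    (alpha * emb_by_path[p] + (1 - alpha) * (1.0 - ham_by_path[p] / HASH_BITS), p)\n",
+    "    for p in emb_by_path\n",
+    "]\n",
+    "hybrid.sort(key=lambda x: x[0], reverse=True)\n",
+    "print(\"\\nTop results by hybrid score:\")\n",
+    "for rank, (score, path) in enumerate(hybrid[:k], start=1):\n",
+    "    print(f\"Rank {rank}: {score:.4f}  {path}\")\n"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "id": "e7077b3a",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "c:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\tqdm\\auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
+      "  from .autonotebook import tqdm as notebook_tqdm\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Loading DINOv2 model 'facebook/dinov2-small'...\n",
+      "DINOv2 model loaded successfully on cpu.\n",
+      "Found 989 images. 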
Generating embeddings and phash data...\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[31m---------------------------------------------------------------------------\u001b[39m", + "\u001b[31mKeyboardInterrupt\u001b[39m Traceback (most recent call last)", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[4]\u001b[39m\u001b[32m, line 285\u001b[39m\n\u001b[32m 279\u001b[39m preproc_cache[path] = {\n\u001b[32m 280\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mpil_for_model\u001b[39m\u001b[33m\"\u001b[39m: preprocess_for_model(bg_added),\n\u001b[32m 281\u001b[39m \u001b[33m\"\u001b[39m\u001b[33marray_for_hash\u001b[39m\u001b[33m\"\u001b[39m: preprocess_for_hash(bg_added)\n\u001b[32m 282\u001b[39m }\n\u001b[32m 284\u001b[39m \u001b[38;5;66;03m# 6) Embedding & hashing\u001b[39;00m\n\u001b[32m--> \u001b[39m\u001b[32m285\u001b[39m emb = \u001b[43mget_dinov2_embedding_from_pil\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpreproc_cache\u001b[49m\u001b[43m[\u001b[49m\u001b[43mpath\u001b[49m\u001b[43m]\u001b[49m\u001b[43m[\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mpil_for_model\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 286\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m emb \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[32m 287\u001b[39m embeddings_dict[path] = emb.tolist()\n", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[4]\u001b[39m\u001b[32m, line 151\u001b[39m, in \u001b[36mget_dinov2_embedding_from_pil\u001b[39m\u001b[34m(pil_img)\u001b[39m\n\u001b[32m 149\u001b[39m inputs = dinov2_processor(images=pil_img, return_tensors=\u001b[33m\"\u001b[39m\u001b[33mpt\u001b[39m\u001b[33m\"\u001b[39m).to(DEVICE)\n\u001b[32m 150\u001b[39m \u001b[38;5;28;01mwith\u001b[39;00m torch.no_grad():\n\u001b[32m--> \u001b[39m\u001b[32m151\u001b[39m outputs = \u001b[43mdinov2_model\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43minputs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 152\u001b[39m embedding = outputs.last_hidden_state[:, \u001b[32m0\u001b[39m, :].squeeze(\u001b[32m0\u001b[39m).cpu().numpy()\n\u001b[32m 153\u001b[39m norm = np.linalg.norm(embedding)\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1773\u001b[39m, in \u001b[36mModule._wrapped_call_impl\u001b[39m\u001b[34m(self, *args, **kwargs)\u001b[39m\n\u001b[32m 1771\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m._compiled_call_impl(*args, **kwargs) \u001b[38;5;66;03m# type: ignore[misc]\u001b[39;00m\n\u001b[32m 1772\u001b[39m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[32m-> \u001b[39m\u001b[32m1773\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43m_call_impl\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\torch\\nn\\modules\\module.py:1784\u001b[39m, in \u001b[36mModule._call_impl\u001b[39m\u001b[34m(self, *args, **kwargs)\u001b[39m\n\u001b[32m 1779\u001b[39m \u001b[38;5;66;03m# If we don't have any hooks, we want to skip the rest of the logic 
\u001b[38;5;129;01mor\u001b[39;00m _global_backward_pre_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_backward_hooks\n\u001b[32m 1783\u001b[39m \u001b[38;5;129;01mor\u001b[39;00m _global_forward_hooks \u001b[38;5;129;01mor\u001b[39;00m _global_forward_pre_hooks):\n\u001b[32m-> \u001b[39m\u001b[32m1784\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mforward_call\u001b[49m\u001b[43m(\u001b[49m\u001b[43m*\u001b[49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1786\u001b[39m result = \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[32m 1787\u001b[39m called_always_called_hooks = \u001b[38;5;28mset\u001b[39m()\n", + "\u001b[36mFile \u001b[39m\u001b[32mc:\\Users\\Admin\\miniconda3\\envs\\scratch_env\\Lib\\site-packages\\torch\\nn\\modules\\linear.py:125\u001b[39m, in \u001b[36mLinear.forward\u001b[39m\u001b[34m(self, input)\u001b[39m\n\u001b[32m 124\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34mforward\u001b[39m(\u001b[38;5;28mself\u001b[39m, \u001b[38;5;28minput\u001b[39m: Tensor) -> Tensor:\n\u001b[32m--> \u001b[39m\u001b[32m125\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mF\u001b[49m\u001b[43m.\u001b[49m\u001b[43mlinear\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mweight\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mbias\u001b[49m\u001b[43m)\u001b[49m\n", + "\u001b[31mKeyboardInterrupt\u001b[39m: " + ] + } + ], + "source": [ + "import os\n", + "import json\n", + "import base64\n", + "import io\n", + "import numpy as np\n", + "import torch\n", + "import cv2\n", + "from PIL import Image, ImageOps, ImageEnhance\n", + "from imagededup.methods import PHash\n", + "from transformers import AutoImageProcessor, AutoModel\n", + "from pathlib import Path\n", + "\n", + "# ============================== #\n", + "# DEFINE PATHS #\n", + "# ============================== #\n", + "backdrop_images_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\"\n", + "sprite_images_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\"\n", + "code_blocks_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\"\n", + "output_dir = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\"\n", + "\n", + "image_dirs = [backdrop_images_path, sprite_images_path, code_blocks_path]\n", + "\n", + "# Where processed (bg-added) images will be saved for inspection\n", + "processed_dir = os.path.join(output_dir, \"processed_with_bg\")\n", + "os.makedirs(processed_dir, exist_ok=True)\n", + "\n", + "# ======================================= #\n", + "# INITIALIZE DINOv2 EMBEDDER #\n", + "# ======================================= #\n", + "print(\"Loading DINOv2 model 'facebook/dinov2-small'...\")\n", + "DINOV2_MODEL = \"facebook/dinov2-small\"\n", + "DEVICE = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n", + "if DEVICE.type == \"cpu\":\n", + " torch.set_num_threads(4)\n", + "\n", + "dinov2_processor = AutoImageProcessor.from_pretrained(DINOV2_MODEL)\n", + "dinov2_model = AutoModel.from_pretrained(DINOV2_MODEL)\n", + "dinov2_model.to(DEVICE)\n", + "dinov2_model.eval()\n", + "print(f\"DINOv2 model loaded successfully on {DEVICE}.\")\n", + "\n", + "# 
========================================= #\n", + "# IMAGE PREPROCESSING + HASHING SETUP #\n", + "# ========================================= #\n", + "phash = PHash()\n", + "\n", + "# ------------ base64 helpers --------------\n", + "def pil_to_base64(pil_img, fmt=\"PNG\"):\n", + " \"\"\"\n", + " Convert a PIL.Image to a base64 string.\n", + " \"\"\"\n", + " buffered = io.BytesIO()\n", + " pil_img.save(buffered, format=fmt)\n", + " b64 = base64.b64encode(buffered.getvalue()).decode(\"utf-8\")\n", + " return b64\n", + "\n", + "def base64_to_pil(b64):\n", + " \"\"\"\n", + " Convert base64 string back to PIL.Image\n", + " \"\"\"\n", + " try:\n", + " data = base64.b64decode(b64)\n", + " return Image.open(io.BytesIO(data))\n", + " except Exception as e:\n", + " print(f\"Failed to decode base64 to PIL: {e}\")\n", + " return None\n", + "\n", + "# ------------ image helpers --------------\n", + "def load_image(image_path):\n", + " try:\n", + " img = Image.open(image_path)\n", + " return img\n", + " except Exception as e:\n", + " print(f\"Failed to open image {image_path}: {e}\")\n", + " return None\n", + "\n", + "def add_background(pil_img, bg_color=(255, 255, 255), size=None):\n", + " \"\"\"\n", + " Add a background behind the image.\n", + " - If image has alpha, composite onto bg_color.\n", + " - If `size` provided, will paste the image centered on that canvas size.\n", + " Returns a new PIL.Image in RGB mode.\n", + " \"\"\"\n", + " if pil_img is None:\n", + " return None\n", + " try:\n", + " if size is None:\n", + " target_size = pil_img.size\n", + " else:\n", + " target_size = size\n", + "\n", + " bg = Image.new(\"RGB\", target_size, bg_color)\n", + " img_rgba = pil_img.convert(\"RGBA\")\n", + "\n", + " if img_rgba.size != target_size:\n", + " x = (target_size[0] - img_rgba.size[0]) // 2\n", + " y = (target_size[1] - img_rgba.size[1]) // 2\n", + " else:\n", + " x, y = 0, 0\n", + "\n", + " # Paste using alpha channel as mask if present\n", + " mask = img_rgba.split()[3] if img_rgba.mode == \"RGBA\" else None\n", + " bg.paste(img_rgba.convert(\"RGB\"), (x, y), mask=mask)\n", + " return bg\n", + " except Exception as e:\n", + " print(f\"Error adding background: {e}\")\n", + " return None\n", + "\n", + "def preprocess_for_hash(pil_img, size=(256, 256)):\n", + " \"\"\"\n", + " Convert to grayscale, equalize and return numpy array for PHash.\n", + " \"\"\"\n", + " try:\n", + " img = pil_img.convert(\"RGB\")\n", + " img = ImageOps.grayscale(img)\n", + " img = ImageOps.equalize(img)\n", + " img = img.resize(size)\n", + " arr = np.array(img).astype(np.uint8)\n", + " return arr\n", + " except Exception as e:\n", + " print(f\"Error in preprocess_for_hash: {e}\")\n", + " return None\n", + "\n", + "def preprocess_for_model(pil_img):\n", + " \"\"\"\n", + " Return a PIL.Image suitable for the DINOv2 processor (RGB).\n", + " The processor will handle resizing/normalization.\n", + " \"\"\"\n", + " try:\n", + " img = pil_img\n", + " if img.mode == \"RGBA\":\n", + " img = img.convert(\"RGB\")\n", + " elif img.mode == \"L\":\n", + " img = img.convert(\"RGB\")\n", + " else:\n", + " img = img.convert(\"RGB\")\n", + " return img\n", + " except Exception as e:\n", + " print(f\"Error in preprocess_for_model: {e}\")\n", + " return None\n", + "\n", + "def get_dinov2_embedding_from_pil(pil_img):\n", + " \"\"\"\n", + " Accepts a PIL.Image, runs DINOv2 and returns a normalized numpy vector or None.\n", + " \"\"\"\n", + " try:\n", + " if pil_img is None:\n", + " return None\n", + " inputs = dinov2_processor(images=pil_img, 
return_tensors=\"pt\").to(DEVICE)\n", + " with torch.no_grad():\n", + " outputs = dinov2_model(**inputs)\n", + " embedding = outputs.last_hidden_state[:, 0, :].squeeze(0).cpu().numpy()\n", + " norm = np.linalg.norm(embedding)\n", + " if norm == 0 or np.isnan(norm):\n", + " return None\n", + " return embedding / norm\n", + " except Exception as e:\n", + " print(f\"Error processing image in get_dinov2_embedding_from_pil: {e}\")\n", + " return None\n", + "\n", + "# ========================================= #\n", + "# OpenCV enhancement (adapted to accept PIL input)\n", + "# ========================================= #\n", + "def pil_to_bgr_np(pil_img):\n", + " arr = np.array(pil_img.convert(\"RGB\"))\n", + " bgr = cv2.cvtColor(arr, cv2.COLOR_RGB2BGR)\n", + " return bgr\n", + "\n", + "def bgr_np_to_pil(bgr_np):\n", + " rgb = cv2.cvtColor(bgr_np, cv2.COLOR_BGR2RGB)\n", + " return Image.fromarray(rgb)\n", + "\n", + "def upscale_image(image, scale=2):\n", + " height, width = image.shape[:2]\n", + " upscaled_image = cv2.resize(image, (width * scale, height * scale), interpolation=cv2.INTER_CUBIC)\n", + " return upscaled_image\n", + "\n", + "def reduce_noise(image):\n", + " return cv2.fastNlMeansDenoisingColored(image, None, 10, 10, 7, 21)\n", + "\n", + "def sharpen_image(image):\n", + " kernel = np.array([[0, -1, 0],\n", + " [-1, 5, -1],\n", + " [0, -1, 0]])\n", + " sharpened_image = cv2.filter2D(image, -1, kernel)\n", + " return sharpened_image\n", + "\n", + "def enhance_image_cv2(image):\n", + " pil_img = Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))\n", + " enhancer = ImageEnhance.Contrast(pil_img)\n", + " enhanced_image = enhancer.enhance(1.5)\n", + " enhanced_image_bgr = cv2.cvtColor(np.array(enhanced_image), cv2.COLOR_RGB2BGR)\n", + " return enhanced_image_bgr\n", + "\n", + "def process_image_cv2_from_pil(pil_img, scale=2):\n", + " \"\"\"\n", + " Accept PIL.Image, convert to BGR numpy, run enhancement pipeline, return PIL.Image (RGB).\n", + " \"\"\"\n", + " try:\n", + " bgr = pil_to_bgr_np(pil_img)\n", + " upscaled = upscale_image(bgr, scale=scale) if scale != 1 else bgr\n", + " denoised = reduce_noise(upscaled)\n", + " sharpened = sharpen_image(denoised)\n", + " final_bgr = enhance_image_cv2(sharpened)\n", + " return bgr_np_to_pil(final_bgr)\n", + " except Exception as e:\n", + " print(f\"OpenCV pipeline failed: {e}\")\n", + " return None\n", + "\n", + "# ========================================= #\n", + "# Walk folders to collect all image paths #\n", + "# ========================================= #\n", + "folder_image_paths = []\n", + "for image_dir in image_dirs:\n", + " for root, _, files in os.walk(image_dir):\n", + " for fname in files:\n", + " if fname.lower().endswith((\".png\", \".jpg\", \".jpeg\", \".bmp\", \".gif\")):\n", + " folder_image_paths.append(os.path.join(root, fname))\n", + "\n", + "if not folder_image_paths:\n", + " print(\"No images found in the specified directories. Exiting.\")\n", + " exit()\n", + "\n", + "print(f\"Found {len(folder_image_paths)} images. 
Generating embeddings and phash data...\")\n", + "\n", + "# ================================== #\n", + "# EMBED AND STORE IN A DICT #\n", + "# ================================== #\n", + "embeddings_dict = {} # maps path -> embedding list\n", + "hash_dict = {} # maps path -> phash string\n", + "base64_map = {} # maps path -> bg-added image base64 (so you can inspect exact image used for embedding)\n", + "\n", + "preproc_cache = {}\n", + "\n", + "for path in folder_image_paths:\n", + " pil = load_image(path)\n", + " if pil is None:\n", + " print(f\"Skipping {path} (failed to load).\")\n", + " continue\n", + "\n", + " # 1) FIRST LAYER: convert to base64 (original)\n", + " try:\n", + " orig_b64 = pil_to_base64(pil, fmt=\"PNG\")\n", + " except Exception as e:\n", + " print(f\"Failed to convert original to base64 for {path}: {e}\")\n", + " continue\n", + "\n", + " # 2) Convert base64 back to PIL (per your requested flow)\n", + " pil_from_b64 = base64_to_pil(orig_b64)\n", + " if pil_from_b64 is None:\n", + " print(f\"Failed to decode base64 for {path}. Skipping.\")\n", + " continue\n", + "\n", + " # 3) Add background to the base64-decoded image (bg-added image is the one used for embedding & hashing)\n", + " bg_added = add_background(pil_from_b64, bg_color=(255,255,255), size=pil_from_b64.size)\n", + " if bg_added is None:\n", + " print(f\"Skipping {path} (failed to add background).\")\n", + " continue\n", + "\n", + " # Save processed background-added image for testing/inspection\n", + " save_name = os.path.join(processed_dir, os.path.basename(path))\n", + " try:\n", + " bg_added.save(save_name, format=\"PNG\")\n", + " except Exception as e:\n", + " try:\n", + " bg_added.convert(\"RGB\").save(save_name, format=\"PNG\")\n", + " except Exception as e2:\n", + " print(f\"Failed to save processed image for {path}: {e2}\")\n", + "\n", + " # 4) Store base64 of the bg-added image (so the exact image used for embedding/hashing is recorded)\n", + " try:\n", + " bg_b64 = pil_to_base64(bg_added, fmt=\"PNG\")\n", + " base64_map[path] = bg_b64\n", + " except Exception as e:\n", + " print(f\"Failed to get base64 of bg-added image for {path}: {e}\")\n", + " base64_map[path] = None\n", + "\n", + " # 5) Cache both forms\n", + " preproc_cache[path] = {\n", + " \"pil_for_model\": preprocess_for_model(bg_added),\n", + " \"array_for_hash\": preprocess_for_hash(bg_added)\n", + " }\n", + "\n", + " # 6) Embedding & hashing\n", + " emb = get_dinov2_embedding_from_pil(preproc_cache[path][\"pil_for_model\"])\n", + " if emb is not None:\n", + " embeddings_dict[path] = emb.tolist()\n", + " else:\n", + " print(f\"Skipping embedding for {path} due to failure.\")\n", + "\n", + " arr = preproc_cache[path][\"array_for_hash\"]\n", + " if arr is not None:\n", + " try:\n", + " hash_dict[path] = phash.encode_image(image_array=arr)\n", + " except Exception as e:\n", + " print(f\"Phash encode failed for {path}: {e}\")\n", + " else:\n", + " print(f\"Skipping hashing for {path} due to preprocessing failure.\")\n", + "\n", + "print(\"Embeddings and hashes generated.\")\n", + "print(f\"Processed images saved to: {processed_dir}\")\n", + "\n", + "# ============================== #\n", + "# SAVE EMBEDDINGS TO JSON #\n", + "# ============================== #\n", + "embeddings_json_path = os.path.join(output_dir, \"hybrid_embeddings.json\")\n", + "hash_json_path = os.path.join(output_dir, \"phash_data.json\")\n", + "base64_json_path = os.path.join(output_dir, \"base64_map.json\")\n", + "\n", + "os.makedirs(output_dir, exist_ok=True)\n", + "with 
open(embeddings_json_path, \"w\") as f:\n", + " json.dump(embeddings_dict, f, indent=2)\n", + "\n", + "with open(hash_json_path, \"w\") as f:\n", + " json.dump(hash_dict, f, indent=2)\n", + "\n", + "with open(base64_json_path, \"w\") as f:\n", + " json.dump(base64_map, f, indent=2)\n", + "\n", + "print(f\"Hybrid embeddings and hashes saved to {embeddings_json_path}, {hash_json_path}\")\n", + "print(f\"BG-added image base64 map saved to {base64_json_path}\")\n", + "print(\"Embedding creation and storage process complete.\")\n", + "\n", + "# ================================================================= #\n", + "# PART 2: SIMILARITY SEARCH FROM THE JSON FILE (QUERY PATH) #\n", + "# ================================================================= #\n", + "def cosine_similarity(vec1, vec2):\n", + " return float(np.dot(vec1, vec2))" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "c338d8f0", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "--- Starting Search Process ---\n", + "Loading hybrid embeddings and hash data from JSON...\n" + ] + }, + { + "ename": "NameError", + "evalue": "name 'embeddings_json_path' is not defined", + "output_type": "error", + "traceback": [ + "\u001b[31m---------------------------------------------------------------------------\u001b[39m", + "\u001b[31mNameError\u001b[39m Traceback (most recent call last)", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[4]\u001b[39m\u001b[32m, line 3\u001b[39m\n\u001b[32m 1\u001b[39m \u001b[38;5;28mprint\u001b[39m(\u001b[33m\"\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[33m--- Starting Search Process ---\u001b[39m\u001b[33m\"\u001b[39m)\n\u001b[32m 2\u001b[39m \u001b[38;5;28mprint\u001b[39m(\u001b[33m\"\u001b[39m\u001b[33mLoading hybrid embeddings and hash data from JSON...\u001b[39m\u001b[33m\"\u001b[39m)\n\u001b[32m----> \u001b[39m\u001b[32m3\u001b[39m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28mopen\u001b[39m(\u001b[43membeddings_json_path\u001b[49m, \u001b[33m\"\u001b[39m\u001b[33mr\u001b[39m\u001b[33m\"\u001b[39m) \u001b[38;5;28;01mas\u001b[39;00m f:\n\u001b[32m 4\u001b[39m embeddings_dict = json.load(f)\n\u001b[32m 6\u001b[39m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28mopen\u001b[39m(hash_json_path, \u001b[33m\"\u001b[39m\u001b[33mr\u001b[39m\u001b[33m\"\u001b[39m) \u001b[38;5;28;01mas\u001b[39;00m f:\n", + "\u001b[31mNameError\u001b[39m: name 'embeddings_json_path' is not defined" + ] + } + ], + "source": [ + "print(\"\\n--- Starting Search Process ---\")\n", + "print(\"Loading hybrid embeddings and hash data from JSON...\")\n", + "with open(embeddings_json_path, \"r\") as f:\n", + " embeddings_dict = json.load(f)\n", + "\n", + "with open(hash_json_path, \"r\") as f:\n", + " hash_dict = json.load(f)\n", + "\n", + "image_paths = list(embeddings_dict.keys())\n", + "image_embeddings = np.array(list(embeddings_dict.values()), dtype=float)\n", + "\n", + "# =================================== #\n", + "# DEFINE QUERY (IMAGE) #\n", + "# =================================== #\n", + "query = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\test_samp\\image (3).png\"\n", + "print(f\"Encoding query image: '{query}'...\")\n", + "\n", + "# ------------------------------\n", + "# Query flow you requested:\n", + "# 1) convert to base64\n", + "# 2) preprocess (OpenCV enhancement)\n", + "# 3) embed -> then score (hybrid)\n", + "# ------------------------------\n", + "query_pil_orig = load_image(query)\n", + "if 
query_pil_orig is None:\n", + " print(\"Could not open query image. Exiting.\")\n", + " exit()\n", + "\n", + "# 1) convert to base64 (first layer)\n", + "try:\n", + " query_b64 = pil_to_base64(query_pil_orig, fmt=\"PNG\")\n", + "except Exception as e:\n", + " print(f\"Failed to convert query to base64: {e}\")\n", + " exit()\n", + "\n", + "# 2) convert back to PIL (simulate base64 roundtrip)\n", + "query_from_b64 = base64_to_pil(query_b64)\n", + "if query_from_b64 is None:\n", + " print(\"Could not decode query base64. Exiting.\")\n", + " exit()\n", + "\n", + "# 3) preprocess using OpenCV enhancement pipeline (adapted to accept PIL)\n", + "enhanced_query_pil = process_image_cv2_from_pil(query_from_b64, scale=2)\n", + "if enhanced_query_pil is None:\n", + " print(\"OpenCV preprocessing failed for query; falling back to base64-decoded PIL.\")\n", + " enhanced_query_pil = query_from_b64\n", + "\n", + "# 4) Add background to the enhanced image (so the image used for embedding/hashing follows same bg flow)\n", + "query_bg = add_background(enhanced_query_pil, bg_color=(255,255,255), size=enhanced_query_pil.size)\n", + "query_save_path = os.path.join(processed_dir, \"query_bg_\" + os.path.basename(query))\n", + "try:\n", + " query_bg.save(query_save_path, format=\"PNG\")\n", + "except Exception:\n", + " pass\n", + "\n", + "# 5) Build DINOv2 embedding and phash from the processed query\n", + "query_emb = get_dinov2_embedding_from_pil(preprocess_for_model(query_bg))\n", + "if query_emb is None:\n", + " print(\"Could not create an embedding for the query. Exiting.\")\n", + " exit()\n", + "\n", + "query_hash_array = preprocess_for_hash(query_bg)\n", + "if query_hash_array is None:\n", + " print(\"Could not create phash for query. Exiting.\")\n", + " exit()\n", + "query_hash = phash.encode_image(image_array=query_hash_array)\n", + "\n", + "# ============================== #\n", + "# PERFORM HYBRID SEARCH #\n", + "# ============================== #\n", + "k = 5\n", + "print(f\"Searching for the top {k} similar images...\")\n", + "\n", + "similarities = []\n", + "hashing_similarities = []\n", + "for i, stored_embedding in enumerate(image_embeddings):\n", + " similarity = cosine_similarity(query_emb, stored_embedding)\n", + " similarities.append((similarity, image_paths[i]))\n", + "\n", + " stored_hash = hash_dict.get(image_paths[i])\n", + " if stored_hash is not None:\n", + " try:\n", + " hamming_distance = phash.hamming_distance(query_hash, stored_hash)\n", + " except Exception as e:\n", + " hamming_distance = 9999\n", + " print(f\"Hamming error for {image_paths[i]}: {e}\")\n", + " else:\n", + " hamming_distance = 9999\n", + " hashing_similarities.append((hamming_distance, image_paths[i]))\n", + "\n", + "similarities.sort(key=lambda x: x[0], reverse=True)\n", + "hashing_similarities.sort(key=lambda x: x[0])\n", + "\n", + "print(\"\\n--- Hybrid Search Results ---\")\n", + "print(\"\\nTop results by DINOv2 Embeddings:\")\n", + "for i in range(min(k, len(similarities))):\n", + " score, path = similarities[i]\n", + " print(f\"Rank {i+1}: Path: {path} | Similarity Score: {score:.4f}\")\n", + "\n", + "print(\"\\nTop results by PHash Hamming Distance:\")\n", + "for i in range(min(k, len(hashing_similarities))):\n", + " dist, path = hashing_similarities[i]\n", + " print(f\"Rank {i+1}: Path: {path} | Hamming Distance: {dist}\")\n", + "\n", + "print(\"Hybrid search complete.\")" + ] + }, + { + "cell_type": "markdown", + "id": "01cda583", + "metadata": {}, + "source": [ + "# BEST hybrid flow here " + ] + }, + { + 
"cell_type": "markdown", + "id": "4efc5bd2", + "metadata": {}, + "source": [ + "## Generating the Embedding by hybrid approach" + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "id": "ae6136fc", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Initializing models and helpers...\n", + "DINOv2 loaded on cpu. PHash & ImageSignature ready.\n", + "Found 991 images. Indexing them...\n", + "Indexed 50 images...\n", + "Indexed 100 images...\n", + "Indexed 150 images...\n", + "Indexed 200 images...\n", + "Indexed 250 images...\n", + "Indexed 300 images...\n", + "Indexed 350 images...\n", + "Indexed 400 images...\n", + "Indexed 450 images...\n", + "Indexed 500 images...\n", + "Indexed 550 images...\n", + "Indexed 600 images...\n", + "Indexed 650 images...\n", + "Indexed 700 images...\n", + "Indexed 750 images...\n", + "Indexed 800 images...\n", + "Indexed 850 images...\n", + "Indexed 900 images...\n", + "Indexed 950 images...\n", + "Indexing complete: 991 embeddings, 991 phashes, 991 signatures.\n", + "Saved embeddings -> D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\hybrid_embeddings.json\n", + "Saved phash -> D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\phash_data.json\n", + "Saved image signatures -> D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\image_signatures.json\n", + "Saved bg-added base64 map -> D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\base64_map.json\n" + ] + } + ], + "source": [ + "#!/usr/bin/env python3\n", + "\"\"\"\n", + "Complete hybrid image matching pipeline:\n", + " - Dataset indexing: base64 -> bg-added -> DINOv2 embedding + PHash + ImageSignature (image-match)\n", + " - Query search: base64 -> OpenCV preprocess -> embedding + pHash + ImageSignature -> score\n", + "Saves JSON outputs:\n", + " - hybrid_embeddings.json\n", + " - phash_data.json\n", + " - image_signatures.json (serializable)\n", + " - base64_map.json (bg-added image base64 per path)\n", + "\"\"\"\n", + "import os\n", + "import io\n", + "import json\n", + "import base64\n", + "import numpy as np\n", + "from pathlib import Path\n", + "\n", + "# torch/transformers\n", + "import torch\n", + "from transformers import AutoImageProcessor, AutoModel\n", + "\n", + "# image tools\n", + "from PIL import Image, ImageOps, ImageEnhance\n", + "import cv2\n", + "\n", + "# hashing & image-match\n", + "from imagededup.methods import PHash\n", + "from image_match.goldberg import ImageSignature\n", + "\n", + "# -----------------------\n", + "# CONFIG / PATHS\n", + "# -----------------------\n", + "backdrop_images_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\"\n", + "sprite_images_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\"\n", + "code_blocks_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\"\n", + "output_dir = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\"\n", + "\n", + "image_dirs = [backdrop_images_path, sprite_images_path, code_blocks_path]\n", + "processed_dir = os.path.join(output_dir, \"processed_with_bg\")\n", + "os.makedirs(processed_dir, exist_ok=True)\n", + "os.makedirs(output_dir, exist_ok=True)\n", + "\n", + "# Query image (adjust path as needed)\n", + "QUERY_PATH = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\test_samp\\image (3).png\"\n", + "\n", + "# DINOv2 model id\n", + "DINOV2_MODEL = \"facebook/dinov2-small\"\n", + "\n", + "# For 
PHash normalization when combining scores: assumed max hamming bits (typical phash=64)\n", + "MAX_PHASH_BITS = 64\n", + "\n", + "# -----------------------\n", + "# INITIALIZE MODELS\n", + "# -----------------------\n", + "print(\"Initializing models and helpers...\")\n", + "DEVICE = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n", + "if DEVICE.type == \"cpu\":\n", + " torch.set_num_threads(4)\n", + "\n", + "dinov2_processor = AutoImageProcessor.from_pretrained(DINOV2_MODEL)\n", + "dinov2_model = AutoModel.from_pretrained(DINOV2_MODEL)\n", + "dinov2_model.to(DEVICE)\n", + "dinov2_model.eval()\n", + "\n", + "phash = PHash()\n", + "gis = ImageSignature()\n", + "\n", + "print(f\"DINOv2 loaded on {DEVICE}. PHash & ImageSignature ready.\")\n", + "\n", + "# -----------------------\n", + "# SERIALIZABLE HELPER\n", + "# -----------------------\n", + "def make_json_serializable(obj):\n", + " \"\"\"Recursively convert numpy and other objects into JSON-serializable types.\"\"\"\n", + " if obj is None:\n", + " return None\n", + " if isinstance(obj, (str, int, float, bool)):\n", + " return obj\n", + " if isinstance(obj, np.ndarray):\n", + " return obj.tolist()\n", + " if isinstance(obj, dict):\n", + " return {str(k): make_json_serializable(v) for k, v in obj.items()}\n", + " if isinstance(obj, (list, tuple)):\n", + " return [make_json_serializable(v) for v in obj]\n", + " # some image-match signatures may contain numpy, so try .tolist\n", + " try:\n", + " return obj.tolist()\n", + " except Exception:\n", + " pass\n", + " # fallback to string\n", + " return str(obj)\n", + "\n", + "# -----------------------\n", + "# BASE64 <-> PIL\n", + "# -----------------------\n", + "def pil_to_base64(pil_img, fmt=\"PNG\"):\n", + " buffer = io.BytesIO()\n", + " pil_img.save(buffer, format=fmt)\n", + " return base64.b64encode(buffer.getvalue()).decode(\"utf-8\")\n", + "\n", + "def base64_to_pil(b64):\n", + " try:\n", + " data = base64.b64decode(b64)\n", + " return Image.open(io.BytesIO(data))\n", + " except Exception as e:\n", + " print(f\"[base64_to_pil] Error: {e}\")\n", + " return None\n", + "\n", + "# -----------------------\n", + "# PIL helpers\n", + "# -----------------------\n", + "def load_image_pil(path):\n", + " try:\n", + " return Image.open(path)\n", + " except Exception as e:\n", + " print(f\"[load_image_pil] Could not open {path}: {e}\")\n", + " return None\n", + "\n", + "def add_background(pil_img, bg_color=(255,255,255), size=None):\n", + " if pil_img is None:\n", + " return None\n", + " try:\n", + " target = size if size is not None else pil_img.size\n", + " bg = Image.new(\"RGB\", target, bg_color)\n", + " img_rgba = pil_img.convert(\"RGBA\")\n", + " if img_rgba.size != target:\n", + " x = (target[0] - img_rgba.size[0]) // 2\n", + " y = (target[1] - img_rgba.size[1]) // 2\n", + " else:\n", + " x, y = 0, 0\n", + " mask = img_rgba.split()[3] if img_rgba.mode == \"RGBA\" else None\n", + " bg.paste(img_rgba.convert(\"RGB\"), (x,y), mask=mask)\n", + " return bg\n", + " except Exception as e:\n", + " print(f\"[add_background] Error: {e}\")\n", + " return None\n", + "\n", + "def preprocess_for_hash(pil_img, size=(256,256)):\n", + " try:\n", + " img = pil_img.convert(\"RGB\")\n", + " img = ImageOps.grayscale(img)\n", + " img = ImageOps.equalize(img)\n", + " img = img.resize(size)\n", + " return np.array(img).astype(np.uint8)\n", + " except Exception as e:\n", + " print(f\"[preprocess_for_hash] Error: {e}\")\n", + " return None\n", + "\n", + "def preprocess_for_model(pil_img):\n", + " 
try:\n", + " if pil_img.mode == \"RGBA\":\n", + " pil_img = pil_img.convert(\"RGB\")\n", + " elif pil_img.mode == \"L\":\n", + " pil_img = pil_img.convert(\"RGB\")\n", + " else:\n", + " pil_img = pil_img.convert(\"RGB\")\n", + " return pil_img\n", + " except Exception as e:\n", + " print(f\"[preprocess_for_model] Error: {e}\")\n", + " return None\n", + "\n", + "def get_dinov2_embedding_from_pil(pil_img):\n", + " try:\n", + " if pil_img is None:\n", + " return None\n", + " inputs = dinov2_processor(images=pil_img, return_tensors=\"pt\").to(DEVICE)\n", + " with torch.no_grad():\n", + " outputs = dinov2_model(**inputs)\n", + " # CLS token embedding\n", + " emb = outputs.last_hidden_state[:,0,:].squeeze(0).cpu().numpy()\n", + " n = np.linalg.norm(emb)\n", + " if n == 0 or np.isnan(n):\n", + " return None\n", + " return (emb / n).astype(float)\n", + " except Exception as e:\n", + " print(f\"[get_dinov2_embedding_from_pil] Error: {e}\")\n", + " return None\n", + "\n", + "# -----------------------\n", + "# OpenCV enhancement (accepts PIL)\n", + "# -----------------------\n", + "def pil_to_bgr_np(pil_img):\n", + " arr = np.array(pil_img.convert(\"RGB\"))\n", + " return cv2.cvtColor(arr, cv2.COLOR_RGB2BGR)\n", + "\n", + "def bgr_np_to_pil(bgr_np):\n", + " rgb = cv2.cvtColor(bgr_np, cv2.COLOR_BGR2RGB)\n", + " return Image.fromarray(rgb)\n", + "\n", + "def upscale_image_cv(bgr_np, scale=2):\n", + " h,w = bgr_np.shape[:2]\n", + " return cv2.resize(bgr_np, (w*scale, h*scale), interpolation=cv2.INTER_CUBIC)\n", + "\n", + "def reduce_noise_cv(bgr_np):\n", + " return cv2.fastNlMeansDenoisingColored(bgr_np, None, 10,10,7,21)\n", + "\n", + "def sharpen_cv(bgr_np):\n", + " kernel = np.array([[0,-1,0],[-1,5,-1],[0,-1,0]])\n", + " return cv2.filter2D(bgr_np, -1, kernel)\n", + "\n", + "def enhance_contrast_cv(bgr_np):\n", + " pil_img = Image.fromarray(cv2.cvtColor(bgr_np, cv2.COLOR_BGR2RGB))\n", + " enhancer = ImageEnhance.Contrast(pil_img)\n", + " enhanced = enhancer.enhance(1.5)\n", + " return cv2.cvtColor(np.array(enhanced), cv2.COLOR_RGB2BGR)\n", + "\n", + "def process_image_cv2_from_pil(pil_img, scale=2):\n", + " try:\n", + " bgr = pil_to_bgr_np(pil_img)\n", + " bgr = upscale_image_cv(bgr, scale=scale) if scale != 1 else bgr\n", + " bgr = reduce_noise_cv(bgr)\n", + " bgr = sharpen_cv(bgr)\n", + " bgr = enhance_contrast_cv(bgr)\n", + " return bgr_np_to_pil(bgr)\n", + " except Exception as e:\n", + " print(f\"[process_image_cv2_from_pil] Error: {e}\")\n", + " return None\n", + "\n", + "# -----------------------\n", + "# Collect dataset image paths\n", + "# -----------------------\n", + "folder_image_paths = []\n", + "for image_dir in image_dirs:\n", + " for root, _, files in os.walk(image_dir):\n", + " for fname in files:\n", + " if fname.lower().endswith((\".png\",\".jpg\",\".jpeg\",\".bmp\",\".gif\")):\n", + " folder_image_paths.append(os.path.join(root, fname))\n", + "\n", + "if not folder_image_paths:\n", + " print(\"No images found in image_dirs. Exiting.\")\n", + " raise SystemExit(1)\n", + "\n", + "print(f\"Found {len(folder_image_paths)} images. 
Indexing them...\")\n", + "\n", + "# -----------------------\n", + "# Indexing: for each image do base64 -> bg -> embedding/hash/signature\n", + "# -----------------------\n", + "embeddings_dict = {} # path -> normalized embedding (list)\n", + "hash_dict = {} # path -> phash (string)\n", + "signature_obj_map = {} # path -> raw signature object (kept in memory for search)\n", + "signature_json_map = {} # path -> serializable signature (saved to disk)\n", + "base64_map = {} # path -> base64 of BG-added image (inspect later)\n", + "\n", + "added_count = 0\n", + "for path in folder_image_paths:\n", + " try:\n", + " pil = load_image_pil(path)\n", + " if pil is None:\n", + " print(f\"Skipping (load fail): {path}\")\n", + " continue\n", + "\n", + " # 1) first layer: convert original to base64\n", + " try:\n", + " orig_b64 = pil_to_base64(pil, fmt=\"PNG\")\n", + " except Exception as e:\n", + " print(f\"Could not base64 original {path}: {e}\")\n", + " continue\n", + "\n", + " # 2) decode base64 back to PIL (this is your requested first-layer roundtrip)\n", + " pil_from_b64 = base64_to_pil(orig_b64)\n", + " if pil_from_b64 is None:\n", + " print(f\"Could not decode base64 for {path}. Skipping.\")\n", + " continue\n", + "\n", + " # 3) add background to the base64-decoded image (bg_added is used for embedding/hash/signature)\n", + " bg_added = add_background(pil_from_b64, bg_color=(255,255,255), size=pil_from_b64.size)\n", + " if bg_added is None:\n", + " print(f\"Could not add background for {path}. Skipping.\")\n", + " continue\n", + "\n", + " # 4) Save the bg-added image to processed_dir for inspection and for ImageSignature reading\n", + " save_name = os.path.join(processed_dir, os.path.basename(path))\n", + " try:\n", + " bg_added.save(save_name, format=\"PNG\")\n", + " except Exception:\n", + " try:\n", + " bg_added.convert(\"RGB\").save(save_name, format=\"PNG\")\n", + " except Exception as e:\n", + " print(f\"Failed to save processed image for {path}: {e}\")\n", + " # continue anyway, but image-match will fail if file missing\n", + " # 5) Save base64 of the bg-added image\n", + " try:\n", + " bg_b64 = pil_to_base64(bg_added, fmt=\"PNG\")\n", + " base64_map[path] = bg_b64\n", + " except Exception as e:\n", + " base64_map[path] = None\n", + "\n", + " # 6) Prepare forms for model & hash\n", + " pil_for_model = preprocess_for_model(bg_added)\n", + " arr_for_hash = preprocess_for_hash(bg_added)\n", + "\n", + " # 7) DINOv2 embedding\n", + " emb = get_dinov2_embedding_from_pil(pil_for_model)\n", + " if emb is not None:\n", + " embeddings_dict[path] = emb.tolist()\n", + " else:\n", + " print(f\"Embedding failed for {path}\")\n", + "\n", + " # 8) PHash\n", + " if arr_for_hash is not None:\n", + " try:\n", + " ph = phash.encode_image(image_array=arr_for_hash)\n", + " hash_dict[path] = ph\n", + " except Exception as e:\n", + " print(f\"PHash failed for {path}: {e}\")\n", + " else:\n", + " print(f\"Hash preprocess failed for {path}\")\n", + "\n", + " # 9) ImageSignature (uses the saved processed image file path)\n", + " try:\n", + " if os.path.exists(save_name):\n", + " sig = gis.generate_signature(save_name)\n", + " else:\n", + " # as fallback, save to a temporary path and pass that\n", + " tmp = os.path.join(processed_dir, f\"tmp_{Path(path).stem}.png\")\n", + " bg_added.save(tmp, format=\"PNG\")\n", + " sig = gis.generate_signature(tmp)\n", + " try:\n", + " os.remove(tmp)\n", + " except Exception:\n", + " pass\n", + " signature_obj_map[path] = sig\n", + " signature_json_map[path] = 
make_json_serializable(sig)\n", + " except Exception as e:\n", + " print(f\"[ImageSignature] failed for {path}: {e}\")\n", + " signature_obj_map[path] = None\n", + " signature_json_map[path] = None\n", + "\n", + " added_count += 1\n", + " if added_count % 50 == 0:\n", + " print(f\"Indexed {added_count} images...\")\n", + "\n", + " except Exception as e:\n", + " print(f\"[Indexing] Unexpected error for {path}: {e}\")\n", + "\n", + "print(f\"Indexing complete: {len(embeddings_dict)} embeddings, {len(hash_dict)} phashes, {len(signature_obj_map)} signatures.\")\n", + "\n", + "# -----------------------\n", + "# Save JSON outputs (serializable)\n", + "# -----------------------\n", + "embeddings_json_path = os.path.join(output_dir, \"hybrid_embeddings.json\")\n", + "phash_json_path = os.path.join(output_dir, \"phash_data.json\")\n", + "sig_json_path = os.path.join(output_dir, \"image_signatures.json\")\n", + "base64_json_path = os.path.join(output_dir, \"base64_map.json\")\n", + "\n", + "with open(embeddings_json_path, \"w\") as f:\n", + " json.dump(embeddings_dict, f, indent=2)\n", + "with open(phash_json_path, \"w\") as f:\n", + " json.dump(hash_dict, f, indent=2)\n", + "with open(sig_json_path, \"w\") as f:\n", + " json.dump(signature_json_map, f, indent=2)\n", + "with open(base64_json_path, \"w\") as f:\n", + " json.dump(base64_map, f, indent=2)\n", + "\n", + "print(f\"Saved embeddings -> {embeddings_json_path}\")\n", + "print(f\"Saved phash -> {phash_json_path}\")\n", + "print(f\"Saved image signatures -> {sig_json_path}\")\n", + "print(f\"Saved bg-added base64 map -> {base64_json_path}\")" + ] + }, + { + "cell_type": "markdown", + "id": "dce856d9", + "metadata": {}, + "source": [ + "## Testing on sample" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b7ced492", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "--- Query/Search Phase ---\n", + "\n", + "Top results by DINOv2 Embeddings:\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white2.jpg | Cosine: 0.6674\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white.png | Cosine: 0.6004\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Blue Sky 2.sb3\\8eb8790be5507fdccf73e7c1570bbbab.png | Cosine: 0.5916\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Button3.sprite3\\a3b357ea21773bcb3545a227ee877e9a.png | Cosine: 0.4592\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Line.sprite3\\e85305b47cfd92d971704dcb7ad6e17b.png | Cosine: 0.4415\n", + "Rank 6: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Paddle.sprite3\\15864fac7d38bb94c1ec3a199de96c26.png | Cosine: 0.4320\n", + "Rank 7: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Blue Sky.sb3\\e7c147730f19d284bcd7b3f00af19bb6.png | Cosine: 0.4109\n", + "Rank 8: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Hatchling.sprite3\\55f7d457eb0af78cb309ca47497c490f.png | Cosine: 0.4022\n", + "Rank 9: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Buildings.sprite3\\8f64966be60d332b345598819c67a8b6.png | Cosine: 0.3950\n", + "Rank 10: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Light.sb3\\4b98c07876ed8997c3762e75790507b4.png | Cosine: 0.3933\n", + "\n", + "Top results by PHash (Hamming distance & normalized sim):\n", + "Rank 1: D:\\DEV 
PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white2.jpg | Hamming: 6 | NormSim: 0.9062\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Drums Conga.sprite3\\bdad2f140cfbd021f38241fc9acc7fd2.png | Hamming: 16 | NormSim: 0.7500\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Anina Dance.sprite3\\d86bb27b4f8d7b70c39c96f29c6943b4.png | Hamming: 20 | NormSim: 0.6875\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Butterfly 2.sprite3\\e96f4c6913107c9b790d37bb65507c14.png | Hamming: 20 | NormSim: 0.6875\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Button1.sprite3\\21fb7fa07eac4794fded0be4e18e20a2.png | Hamming: 20 | NormSim: 0.6875\n", + "Rank 6: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Orange2.sprite3\\b823f73a31e61fd362574e2c24dfc0c2.png | Hamming: 20 | NormSim: 0.6875\n", + "Rank 7: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Potion.sprite3\\d922ffdfe38fd30fd8787810c6bce318.png | Hamming: 20 | NormSim: 0.6875\n", + "Rank 8: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Story-G.sprite3\\85144902cc61fe98dca513b74276d7d8.png | Hamming: 20 | NormSim: 0.6875\n", + "Rank 9: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Story-M.sprite3\\643896fcad0a1bf6eb9f3f590094687c.png | Hamming: 20 | NormSim: 0.6875\n", + "Rank 10: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Story-M.sprite3\\9bf9e677da34528433d3c1acb945e2df.png | Hamming: 20 | NormSim: 0.6875\n", + "\n", + "Top results by ImageSignature (normalized similarity = 1 - distance):\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white2.jpg | NormDist: 0.31594953942701315 | NormSim: 0.6841\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Stars.sb3\\47282ff0f7047c6fab9c94b531abf721.png | NormDist: 0.5075448260035007 | NormSim: 0.4925\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Button3.sprite3\\5021f6b7d166873ef0711c4d4a351912.png | NormDist: 0.5565832838531939 | NormSim: 0.4434\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Theater 2.sb3\\061a78ed83495dd0acd6d62e83e1b972.png | NormDist: 0.5625566207225899 | NormSim: 0.4374\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Beach Malibu.sb3\\050615fe992a00d6af0e664e497ebf53.png | NormDist: 0.6011366772769449 | NormSim: 0.3989\n", + "Rank 6: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Trampoline.sprite3\\8fa3c6fcff2f25f5fe7842d68dcfe5cf.png | NormDist: 0.6039242056273435 | NormSim: 0.3961\n", + "Rank 7: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Bowl.sprite3\\d147f16e3e2583719c073ac5b55fe3ca.png | NormDist: 0.6083915613788574 | NormSim: 0.3916\n", + "Rank 8: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Woods.sb3\\f3eb165d6f3fd23370f97079f2e631bf.png | NormDist: 0.6244258142987815 | NormSim: 0.3756\n", + "Rank 9: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Milk.sprite3\\0f683f65c737bbcbb916df0895d8436e.png | NormDist: 0.6256894961919556 | NormSim: 0.3743\n", + "Rank 10: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Story-Q.sprite3\\84a6dc992bce018a1eac9be0173ad917.png | NormDist: 0.6287226097594288 | NormSim: 0.3713\n", + "\n", + "Top results by Combined Score (avg of 
embedding|phash|image-match):\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white2.jpg | Combined: 0.7526 | emb: 0.6674 | phash_sim: 0.9062 | imgmatch_sim: 0.6841\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Clouds.sprite3\\9f5958f46d21e33d3f6d7caffbe0daa9.png | Combined: 0.4359 | emb: 0.3676 | phash_sim: 0.6562 | imgmatch_sim: 0.2839\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Button3.sprite3\\5021f6b7d166873ef0711c4d4a351912.png | Combined: 0.4209 | emb: 0.3194 | phash_sim: 0.5000 | imgmatch_sim: 0.4434\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Button1.sprite3\\21fb7fa07eac4794fded0be4e18e20a2.png | Combined: 0.4164 | emb: 0.2152 | phash_sim: 0.6875 | imgmatch_sim: 0.3467\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Trees.sprite3\\04758bd432a8b1cab527bddf14432147.png | Combined: 0.4162 | emb: 0.2414 | phash_sim: 0.6875 | imgmatch_sim: 0.3196\n", + "Rank 6: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Buildings.sprite3\\fcedb6b25a2db6de28b39130f978b0bf.png | Combined: 0.4120 | emb: 0.3399 | phash_sim: 0.5625 | imgmatch_sim: 0.3334\n", + "Rank 7: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Buildings.sprite3\\8f64966be60d332b345598819c67a8b6.png | Combined: 0.4102 | emb: 0.3950 | phash_sim: 0.5625 | imgmatch_sim: 0.2732\n", + "Rank 8: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Paddle.sprite3\\15864fac7d38bb94c1ec3a199de96c26.png | Combined: 0.4077 | emb: 0.4320 | phash_sim: 0.5312 | imgmatch_sim: 0.2599\n", + "Rank 9: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Story-O.sprite3\\40bf3880b678beeda8cf708a51a4402d.png | Combined: 0.4077 | emb: 0.2115 | phash_sim: 0.6562 | imgmatch_sim: 0.3554\n", + "Rank 10: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Snake.sprite3\\42519e0ee19d75def88a514d3c49ce37.png | Combined: 0.4070 | emb: 0.2667 | phash_sim: 0.6562 | imgmatch_sim: 0.2979\n", + "\n", + "Search complete.\n" + ] + } + ], + "source": [ + "QUERY_PATH = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\test\\figure-1-3.jpg\"\n", + "# -----------------------\n", + "# Query flow (base64 -> preprocess -> embed -> scoring)\n", + "# -----------------------\n", + "print(\"\\n--- Query/Search Phase ---\")\n", + "if not os.path.exists(QUERY_PATH):\n", + " print(f\"Query image not found: {QUERY_PATH}\")\n", + " raise SystemExit(1)\n", + "\n", + "# Load + first-layer base64 roundtrip\n", + "query_pil_orig = load_image_pil(QUERY_PATH)\n", + "if query_pil_orig is None:\n", + " print(\"Could not load query image. Exiting.\")\n", + " raise SystemExit(1)\n", + "\n", + "try:\n", + " query_b64 = pil_to_base64(query_pil_orig, fmt=\"PNG\")\n", + "except Exception as e:\n", + " print(f\"Could not base64 query: {e}\")\n", + " raise SystemExit(1)\n", + "\n", + "query_from_b64 = base64_to_pil(query_b64)\n", + "if query_from_b64 is None:\n", + " print(\"Could not decode query base64. 
Exiting.\")\n", + " raise SystemExit(1)\n", + "\n", + "# Preprocess (OpenCV enhancement)\n", + "enhanced_query_pil = process_image_cv2_from_pil(query_from_b64, scale=2)\n", + "if enhanced_query_pil is None:\n", + " print(\"[Query] OpenCV enhancement failed; falling back to base64-decoded image.\")\n", + " enhanced_query_pil = query_from_b64\n", + "\n", + "# Note: Per your request, query flow does: base64 -> preprocess -> embed -> score\n", + "# (we DO NOT forcibly add background to the query here; stored images were bg-added)\n", + "# However we still compute phash & signature on the preprocessed query (no bg).\n", + "\n", + "# Save the enhanced query for inspection\n", + "query_enhanced_path = os.path.join(processed_dir, \"query_enhanced.png\")\n", + "try:\n", + " enhanced_query_pil.save(query_enhanced_path, format=\"PNG\")\n", + "except Exception:\n", + " try:\n", + " enhanced_query_pil.convert(\"RGB\").save(query_enhanced_path, format=\"PNG\")\n", + " except Exception:\n", + " pass\n", + "\n", + "# 1) Query embedding\n", + "query_emb = get_dinov2_embedding_from_pil(preprocess_for_model(enhanced_query_pil))\n", + "if query_emb is None:\n", + " print(\"Could not compute query embedding. Exiting.\")\n", + " raise SystemExit(1)\n", + "\n", + "# 2) Query phash (from enhanced query; stored phashes were from bg_added images)\n", + "query_hash_arr = preprocess_for_hash(enhanced_query_pil)\n", + "if query_hash_arr is None:\n", + " print(\"Could not compute query phash array. Exiting.\")\n", + " raise SystemExit(1)\n", + "query_phash = phash.encode_image(image_array=query_hash_arr)\n", + "\n", + "# 3) Query signature (save and generate)\n", + "query_sig_path = os.path.join(processed_dir, \"query_for_sig.png\")\n", + "try:\n", + " enhanced_query_pil.save(query_sig_path, format=\"PNG\")\n", + "except Exception:\n", + " enhanced_query_pil.convert(\"RGB\").save(query_sig_path, format=\"PNG\")\n", + "try:\n", + " query_sig = gis.generate_signature(query_sig_path)\n", + "except Exception as e:\n", + " print(f\"[ImageSignature] failed for query: {e}\")\n", + " query_sig = None\n", + "\n", + "# -----------------------\n", + "# Compute similarities\n", + "# -----------------------\n", + "# prepare embeddings array\n", + "image_paths = list(embeddings_dict.keys())\n", + "image_embeddings = np.array(list(embeddings_dict.values()), dtype=float) if embeddings_dict else np.array([])\n", + "\n", + "# cosine similarity\n", + "def cosine_similarity(a, b):\n", + " return float(np.dot(a, b))\n", + "\n", + "# collect scores\n", + "k = 10\n", + "embedding_results = []\n", + "phash_results = []\n", + "imgmatch_results = []\n", + "combined_results = []\n", + "\n", + "# for phash normalization, use MAX_PHASH_BITS; if phash distances exceed that, clamp\n", + "for idx, path in enumerate(image_paths):\n", + " # embedding\n", + " try:\n", + " stored_emb = image_embeddings[idx]\n", + " emb_sim = cosine_similarity(query_emb, stored_emb)\n", + " except Exception:\n", + " emb_sim = -1.0\n", + "\n", + " embedding_results.append((path, emb_sim))\n", + "\n", + " # phash\n", + " try:\n", + " stored_ph = hash_dict.get(path)\n", + " if stored_ph is not None:\n", + " hd = phash.hamming_distance(query_phash, stored_ph)\n", + " # normalize to similarity in [0,1]\n", + " ph_sim = max(0.0, 1.0 - (hd / MAX_PHASH_BITS))\n", + " else:\n", + " hd = None\n", + " ph_sim = 0.0\n", + " except Exception:\n", + " hd = None\n", + " ph_sim = 0.0\n", + " phash_results.append((path, hd, ph_sim))\n", + "\n", + " # image-match / signature\n", + " 
try:\n", + "        stored_sig = signature_obj_map.get(path)\n", + "        if stored_sig is not None and query_sig is not None:\n", + "            dist = gis.normalized_distance(stored_sig, query_sig)  # smaller is more similar\n", + "            im_sim = max(0.0, 1.0 - dist)\n", + "        else:\n", + "            dist = None\n", + "            im_sim = 0.0\n", + "    except Exception:\n", + "        dist = None\n", + "        im_sim = 0.0\n", + "    imgmatch_results.append((path, dist, im_sim))\n", + "\n", + "    # combined score: average of available similarities (embedding in [-1,1] but usually [0,1])\n", + "    # clamp embedding to [0,1] (if negative, set 0)\n", + "    emb_sim_clamped = max(0.0, min(1.0, emb_sim))\n", + "    # average across the three similarity measures\n", + "    combined = (emb_sim_clamped + ph_sim + im_sim) / 3.0\n", + "    combined_results.append((path, combined, emb_sim, ph_sim, im_sim))\n", + "\n", + "# sort results\n", + "embedding_results.sort(key=lambda x: x[1], reverse=True)\n", + "phash_results_sorted = sorted(phash_results, key=lambda x: (x[2] is not None, x[2]), reverse=True)\n", + "imgmatch_results_sorted = sorted(imgmatch_results, key=lambda x: (x[2] is not None, x[2]), reverse=True)\n", + "combined_results.sort(key=lambda x: x[1], reverse=True)\n", + "\n", + "# -----------------------\n", + "# Print Top-K Results\n", + "# -----------------------\n", + "print(\"\\nTop results by DINOv2 Embeddings:\")\n", + "for i, (path,score) in enumerate(embedding_results[:k], start=1):\n", + "    print(f\"Rank {i}: {path} | Cosine: {score:.4f}\")\n", + "\n", + "print(\"\\nTop results by PHash (Hamming distance & normalized sim):\")\n", + "for i, (path, hd, sim) in enumerate(phash_results_sorted[:k], start=1):\n", + "    print(f\"Rank {i}: {path} | Hamming: {hd} | NormSim: {sim:.4f}\")\n", + "\n", + "print(\"\\nTop results by ImageSignature (normalized similarity = 1 - distance):\")\n", + "for i, (path, dist, sim) in enumerate(imgmatch_results_sorted[:k], start=1):\n", + "    print(f\"Rank {i}: {path} | NormDist: {dist} | NormSim: {sim:.4f}\")\n", + "\n", + "print(\"\\nTop results by Combined Score (avg of embedding|phash|image-match):\")\n", + "for i, (path, combined, emb_clamped, ph_sim, im_sim) in enumerate(combined_results[:k], start=1):\n", + "    print(f\"Rank {i}: {path} | Combined: {combined:.4f} | emb: {emb_clamped:.4f} | phash_sim: {ph_sim:.4f} | imgmatch_sim: {im_sim:.4f}\")\n", + "\n", + "print(\"\\nSearch complete.\")" ] }, { "cell_type": "markdown", "id": "378d84f4", "metadata": {}, "source": [ "## Function for generating the top 10 results for each metric" ] }, { "cell_type": "code", "execution_count": 7, "id": "fa658fd5", "metadata": {}, "outputs": [], "source": [ "import os\n", + "import numpy as np\n", + "from typing import Dict, Any, Tuple, List, Optional\n", + "\n", + "def run_query_search_flow(\n", + "    query_path: Optional[str] = None,\n", + "    query_b64: Optional[str] = None,\n", + "    processed_dir: str = \"./processed\",\n", + "    embeddings_dict: Dict[str, np.ndarray] = None,\n", + "    hash_dict: Dict[str, Any] = None,\n", + "    signature_obj_map: Dict[str, Any] = None,\n", + "    gis: Any = None,\n", + "    phash: Any = None,\n", + "    MAX_PHASH_BITS: int = 64,\n", + "    k: int = 10,\n", + ") -> Tuple[\n", + "    List[Tuple[str, float]],\n", + "    List[Tuple[str, Any, float]],\n", + "    List[Tuple[str, Any, float]],\n", + "    List[Tuple[str, float, float, float, float]],\n", + "]:\n", + "    \"\"\"\n", + "    Run the full query/search flow (base64 -> preprocess -> embed -> scoring).\n", + "    Accepts either query_path (file on disk) OR query_b64 (base64 string). 
If both are\n", + " provided, query_b64 takes precedence.\n", + "\n", + " Returns:\n", + " embedding_results_sorted,\n", + " phash_results_sorted,\n", + " imgmatch_results_sorted,\n", + " combined_results_sorted\n", + " \"\"\"\n", + "\n", + " # Validate inputs\n", + " if (query_path is None or query_path == \"\") and (query_b64 is None or query_b64 == \"\"):\n", + " raise ValueError(\"Either query_path or query_b64 must be provided.\")\n", + "\n", + " # Ensure processed_dir exists\n", + " os.makedirs(processed_dir, exist_ok=True)\n", + "\n", + " print(\"\\n--- Query/Search Phase ---\")\n", + "\n", + " # 1) Load query image (prefer base64 if provided)\n", + " if query_b64:\n", + " # base64 provided directly -> decode to PIL\n", + " query_from_b64 = base64_to_pil(query_b64)\n", + " if query_from_b64 is None:\n", + " raise RuntimeError(\"Could not decode provided base64 query. Exiting.\")\n", + " query_pil_orig = query_from_b64\n", + " else:\n", + " # load from disk\n", + " if not os.path.exists(query_path):\n", + " raise FileNotFoundError(f\"Query image not found: {query_path}\")\n", + " query_pil_orig = load_image_pil(query_path)\n", + " if query_pil_orig is None:\n", + " raise RuntimeError(\"Could not load query image from path. Exiting.\")\n", + "\n", + " # also create a base64 roundtrip for robustness (keep original behaviour)\n", + " try:\n", + " query_b64 = pil_to_base64(query_pil_orig, fmt=\"PNG\")\n", + " except Exception as e:\n", + " raise RuntimeError(f\"Could not base64 query from disk image: {e}\")\n", + " # keep decoded copy for consistency\n", + " query_from_b64 = base64_to_pil(query_b64)\n", + " if query_from_b64 is None:\n", + " raise RuntimeError(\"Could not decode query base64 after roundtrip. Exiting.\")\n", + "\n", + " # At this point, query_from_b64 is a PIL.Image we can continue with\n", + " # 2) Preprocess with OpenCV enhancement (best-effort; fallback to base64-decoded image)\n", + " enhanced_query_pil = process_image_cv2_from_pil(query_from_b64, scale=2)\n", + " if enhanced_query_pil is None:\n", + " print(\"[Query] OpenCV enhancement failed; falling back to base64-decoded image.\")\n", + " enhanced_query_pil = query_from_b64\n", + "\n", + " # Save the enhanced query (best-effort)\n", + " query_enhanced_path = os.path.join(processed_dir, \"query_enhanced.png\")\n", + " try:\n", + " enhanced_query_pil.save(query_enhanced_path, format=\"PNG\")\n", + " except Exception:\n", + " try:\n", + " enhanced_query_pil.convert(\"RGB\").save(query_enhanced_path, format=\"PNG\")\n", + " except Exception:\n", + " print(\"[Warning] Could not save enhanced query image for inspection.\")\n", + "\n", + " # 3) Query embedding (preprocess -> model)\n", + " prepped = preprocess_for_model(enhanced_query_pil)\n", + " query_emb = get_dinov2_embedding_from_pil(prepped)\n", + " if query_emb is None:\n", + " raise RuntimeError(\"Could not compute query embedding. Exiting.\")\n", + "\n", + " # 4) Query phash computation\n", + " query_hash_arr = preprocess_for_hash(enhanced_query_pil)\n", + " if query_hash_arr is None:\n", + " raise RuntimeError(\"Could not compute query phash array. 
Exiting.\")\n", + " query_phash = phash.encode_image(image_array=query_hash_arr)\n", + "\n", + " # 5) Query signature generation (best-effort)\n", + " query_sig = None\n", + " query_sig_path = os.path.join(processed_dir, \"query_for_sig.png\")\n", + " try:\n", + " enhanced_query_pil.save(query_sig_path, format=\"PNG\")\n", + " except Exception:\n", + " try:\n", + " enhanced_query_pil.convert(\"RGB\").save(query_sig_path, format=\"PNG\")\n", + " except Exception:\n", + " query_sig_path = None\n", + "\n", + " if query_sig_path:\n", + " try:\n", + " query_sig = gis.generate_signature(query_sig_path)\n", + " except Exception as e:\n", + " print(f\"[ImageSignature] failed for query: {e}\")\n", + " query_sig = None\n", + "\n", + " # -----------------------\n", + " # Prepare stored data arrays\n", + " # -----------------------\n", + " embeddings_dict = embeddings_dict or {}\n", + " hash_dict = hash_dict or {}\n", + " signature_obj_map = signature_obj_map or {}\n", + "\n", + " image_paths = list(embeddings_dict.keys())\n", + " image_embeddings = np.array(list(embeddings_dict.values()), dtype=float) if embeddings_dict else np.array([])\n", + "\n", + " def cosine_similarity(a: np.ndarray, b: np.ndarray) -> float:\n", + " try:\n", + " return float(np.dot(a, b))\n", + " except Exception:\n", + " return -1.0\n", + "\n", + " # Collections\n", + " embedding_results: List[Tuple[str, float]] = []\n", + " phash_results: List[Tuple[str, Any, float]] = []\n", + " imgmatch_results: List[Tuple[str, Any, float]] = []\n", + " combined_results: List[Tuple[str, float, float, float, float]] = []\n", + "\n", + " # Iterate stored images and compute similarities\n", + " for idx, path in enumerate(image_paths):\n", + " # Embedding similarity\n", + " try:\n", + " stored_emb = image_embeddings[idx]\n", + " emb_sim = cosine_similarity(query_emb, stored_emb)\n", + " except Exception:\n", + " emb_sim = -1.0\n", + " embedding_results.append((path, emb_sim))\n", + "\n", + " # PHash similarity (Hamming -> normalized sim)\n", + " try:\n", + " stored_ph = hash_dict.get(path)\n", + " if stored_ph is not None:\n", + " hd = phash.hamming_distance(query_phash, stored_ph)\n", + " ph_sim = max(0.0, 1.0 - (hd / float(MAX_PHASH_BITS)))\n", + " else:\n", + " hd = None\n", + " ph_sim = 0.0\n", + " except Exception:\n", + " hd = None\n", + " ph_sim = 0.0\n", + " phash_results.append((path, hd, ph_sim))\n", + "\n", + " # Image signature similarity (normalized distance -> similarity)\n", + " try:\n", + " stored_sig = signature_obj_map.get(path)\n", + " if stored_sig is not None and query_sig is not None:\n", + " dist = gis.normalized_distance(stored_sig, query_sig)\n", + " im_sim = max(0.0, 1.0 - dist)\n", + " else:\n", + " dist = None\n", + " im_sim = 0.0\n", + " except Exception:\n", + " dist = None\n", + " im_sim = 0.0\n", + " imgmatch_results.append((path, dist, im_sim))\n", + "\n", + " # Combined score: average of the three (embedding is clamped into [0,1])\n", + " emb_clamped = max(0.0, min(1.0, emb_sim))\n", + " combined = (emb_clamped + ph_sim + im_sim) / 3.0\n", + " combined_results.append((path, combined, emb_clamped, ph_sim, im_sim))\n", + "\n", + " # -----------------------\n", + " # Sort results\n", + " # -----------------------\n", + " embedding_results.sort(key=lambda x: x[1], reverse=True)\n", + " phash_results_sorted = sorted(phash_results, key=lambda x: (x[2] is not None, x[2]), reverse=True)\n", + " imgmatch_results_sorted = sorted(imgmatch_results, key=lambda x: (x[2] is not None, x[2]), reverse=True)\n", + " 
combined_results.sort(key=lambda x: x[1], reverse=True)\n", + "\n", + " # -----------------------\n", + " # Print Top-K results\n", + " # -----------------------\n", + " print(\"\\nTop results by DINOv2 Embeddings:\")\n", + " for i, (path, score) in enumerate(embedding_results[:k], start=1):\n", + " print(f\"Rank {i}: {path} | Cosine: {score:.4f}\")\n", + "\n", + " print(\"\\nTop results by PHash (Hamming distance & normalized sim):\")\n", + " for i, (path, hd, sim) in enumerate(phash_results_sorted[:k], start=1):\n", + " print(f\"Rank {i}: {path} | Hamming: {hd} | NormSim: {sim:.4f}\")\n", + "\n", + " print(\"\\nTop results by ImageSignature (normalized similarity = 1 - distance):\")\n", + " for i, (path, dist, sim) in enumerate(imgmatch_results_sorted[:k], start=1):\n", + " print(f\"Rank {i}: {path} | NormDist: {dist} | NormSim: {sim:.4f}\")\n", + "\n", + " print(\"\\nTop results by Combined Score (avg of embedding|phash|image-match):\")\n", + " for i, (path, combined, emb_clamped, ph_sim, im_sim) in enumerate(combined_results[:k], start=1):\n", + " print(f\"Rank {i}: {path} | Combined: {combined:.4f} | emb: {emb_clamped:.4f} | phash_sim: {ph_sim:.4f} | imgmatch_sim: {im_sim:.4f}\")\n", + "\n", + " print(\"\\nSearch complete.\")\n", + "\n", + " # Return sorted lists for programmatic consumption\n", + " return embedding_results, phash_results_sorted, imgmatch_results_sorted, combined_results\n" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "7a531778", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "--- Query/Search Phase ---\n", + "\n", + "Top results by DINOv2 Embeddings:\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jaime.sprite3\\63e56d28cc3e3d9b735e1f1d51248cc0.png | Cosine: 0.4156\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Wizard.sprite3\\df943c9894ee4b9df8c5893ce30c2a5f.png | Cosine: 0.4145\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Avery Walking.sprite3\\dc6a584704c09a3fbafb9825635a9fd4.png | Cosine: 0.4117\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Wizard.sprite3\\91d495085eb4d02a375c42f6318071e7.png | Cosine: 0.4091\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jaime.sprite3\\5883bdefba451aaeac8d77c798d41eb0.png | Cosine: 0.4049\n", + "\n", + "Top results by PHash (Hamming distance & normalized sim):\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Fish.sprite3\\7a0c31c0087f342867d4754f8dc57541.png | Hamming: 18 | NormSim: 0.7188\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Pitcher.sprite3\\ae8aa57ce6e5729d30d8b785bec97774.png | Hamming: 18 | NormSim: 0.7188\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Boardwalk.sb3\\de0e54cd11551566f044e7e6bc588b2c.png | Hamming: 20 | NormSim: 0.6875\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Block-G.sprite3\\989c76ae7f8c2e42ebeacdda961061ca.png | Hamming: 20 | NormSim: 0.6875\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Dan.sprite3\\307250744e230fb15e7062238bf2634c.png | Hamming: 20 | NormSim: 0.6875\n", + "\n", + "Top results by ImageSignature (normalized similarity = 1 - distance):\n", + "Rank 1: D:\\DEV 
PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Story-T.sprite3\\001a2186db228fdd9bfbf3f15800bb63.png | NormDist: 0.4560392101427519 | NormSim: 0.5440\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\LB Dance.sprite3\\79ca528d13ffb557a236f0a35a0eb486.png | NormDist: 0.4608133398933189 | NormSim: 0.5392\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Story-T.sprite3\\66b22b0ff0a5c1c205a701316ab954cf.png | NormDist: 0.4623920634544405 | NormSim: 0.5376\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Anina Dance.sprite3\\2d208a34e74fdce9dab9d4c585dcfa2b.png | NormDist: 0.46903307230486224 | NormSim: 0.5310\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Anina Dance.sprite3\\ed90e8b7a05c1552194af597ac0637cd.png | NormDist: 0.48879650827597787 | NormSim: 0.5112\n", + "\n", + "Top results by Combined Score (avg of embedding|phash|image-match):\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Story-T.sprite3\\001a2186db228fdd9bfbf3f15800bb63.png | Combined: 0.4775 | emb: 0.2323 | phash_sim: 0.6562 | imgmatch_sim: 0.5440\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Anina Dance.sprite3\\ed90e8b7a05c1552194af597ac0637cd.png | Combined: 0.4749 | emb: 0.2574 | phash_sim: 0.6562 | imgmatch_sim: 0.5112\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Anina Dance.sprite3\\2d208a34e74fdce9dab9d4c585dcfa2b.png | Combined: 0.4586 | emb: 0.2509 | phash_sim: 0.5938 | imgmatch_sim: 0.5310\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Wizard.sprite3\\91d495085eb4d02a375c42f6318071e7.png | Combined: 0.4508 | emb: 0.4091 | phash_sim: 0.6250 | imgmatch_sim: 0.3183\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Wizard.sprite3\\df943c9894ee4b9df8c5893ce30c2a5f.png | Combined: 0.4503 | emb: 0.4145 | phash_sim: 0.5312 | imgmatch_sim: 0.4053\n", + "\n", + "Search complete.\n" ] } ], "source": [ "embedding_results, phash_results, imgmatch_results, combined_results = run_query_search_flow(\n", + "    query_b64=query_b64,\n", + "    processed_dir=\"./processed\",\n", + "    embeddings_dict=embeddings_dict,\n", + "    hash_dict=hash_dict,\n", + "    signature_obj_map=signature_obj_map,\n", + "    gis=gis,\n", + "    phash=phash,\n", + "    MAX_PHASH_BITS=64,\n", + "    k=5\n", + ")" ] }, { "cell_type": "markdown", "id": "c753ec47", "metadata": {}, "source": [ "## Function for selecting the final match" ] }, { "cell_type": "code", "execution_count": 42, "id": "8ae24c95", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\n", + "Top by Weighted Normalized Average (weights emb,ph,img = 0.50,0.30,0.20):\n", + " 1. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white2.jpg score=1.0000 emb=1.000 ph=1.000 im=1.000\n", + " 2. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Blue Sky 2.sb3\\8eb8790be5507fdccf73e7c1570bbbab.png score=0.5695 emb=0.886 ph=0.421 im=0.000\n", + " 3. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Clouds.sprite3\\9f5958f46d21e33d3f6d7caffbe0daa9.png score=0.5320 emb=0.551 ph=0.579 im=0.415\n", + " 4. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white.png score=0.5287 emb=0.900 ph=0.263 im=0.000\n", + " 5. 
D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Paddle.sprite3\\15864fac7d38bb94c1ec3a199de96c26.png score=0.5101 emb=0.647 ph=0.368 im=0.380\n", + " 6. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Buildings.sprite3\\8f64966be60d332b345598819c67a8b6.png score=0.5021 emb=0.592 ph=0.421 im=0.399\n", + " 7. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Egg.sprite3\\f8ee449298c1446cb0ef281923a4e57a.png score=0.4928 emb=0.552 ph=0.474 im=0.374\n", + " 8. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Buildings.sprite3\\fcedb6b25a2db6de28b39130f978b0bf.png score=0.4785 emb=0.509 ph=0.421 im=0.487\n", + " 9. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\script4.JPG score=0.4744 emb=0.540 ph=0.368 im=0.469\n", + " 10. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Line.sprite3\\e85305b47cfd92d971704dcb7ad6e17b.png score=0.4728 emb=0.661 ph=0.474 im=0.000\n", + "\n", + "Top by Rank-sum (lower is better):\n", + " 1. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white2.jpg rank_sum=3 emb_rank=1 ph_rank=1 img_rank=1\n", + " 2. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Button1.sprite3\\21fb7fa07eac4794fded0be4e18e20a2.png rank_sum=150 emb_rank=108 ph_rank=5 img_rank=37\n", + " 3. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Story-O.sprite3\\40bf3880b678beeda8cf708a51a4402d.png rank_sum=167 emb_rank=116 ph_rank=26 img_rank=25\n", + " 4. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Trees.sprite3\\04758bd432a8b1cab527bddf14432147.png rank_sum=214 emb_rank=71 ph_rank=12 img_rank=131\n", + " 5. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Buildings.sprite3\\fcedb6b25a2db6de28b39130f978b0bf.png rank_sum=254 emb_rank=22 ph_rank=169 img_rank=63\n", + " 6. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Glow-Q.sprite3\\e4ae18bf8b92ae375ce818d754588c76.png rank_sum=255 emb_rank=113 ph_rank=100 img_rank=42\n", + " 7. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Rays.sb3\\87e963282db9e020e8c4d075891ea12b.png rank_sum=257 emb_rank=95 ph_rank=29 img_rank=133\n", + " 8. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Story-M.sprite3\\9bf9e677da34528433d3c1acb945e2df.png rank_sum=274 emb_rank=209 ph_rank=10 img_rank=55\n", + " 9. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Milk.sprite3\\0f683f65c737bbcbb916df0895d8436e.png rank_sum=289 emb_rank=168 ph_rank=112 img_rank=9\n", + " 10. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Story-R.sprite3\\4f217b14a161fcd9590614b0733100ea.png rank_sum=290 emb_rank=101 ph_rank=129 img_rank=60\n", + "\n", + "Top by Harmonic mean (requires non-zero on all metrics):\n", + " 1. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white2.jpg harm=1.0000 emb=1.000 ph=1.000 im=1.000\n", + " 2. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Clouds.sprite3\\9f5958f46d21e33d3f6d7caffbe0daa9.png harm=0.5039 emb=0.551 ph=0.579 im=0.415\n", + " 3. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Buildings.sprite3\\fcedb6b25a2db6de28b39130f978b0bf.png harm=0.4695 emb=0.509 ph=0.421 im=0.487\n", + " 4. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Trees.sprite3\\04758bd432a8b1cab527bddf14432147.png harm=0.4623 emb=0.362 ph=0.632 im=0.467\n", + " 5. 
D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Snake.sprite3\\42519e0ee19d75def88a514d3c49ce37.png harm=0.4597 emb=0.400 ph=0.579 im=0.435\n", + " 6. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Ghost.sprite3\\d1d89391f1d9c74557e504456d58a002.png harm=0.4594 emb=0.398 ph=0.579 im=0.437\n", + " 7. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Buildings.sprite3\\8f64966be60d332b345598819c67a8b6.png harm=0.4567 emb=0.592 ph=0.421 im=0.399\n", + " 8. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Egg.sprite3\\f8ee449298c1446cb0ef281923a4e57a.png harm=0.4546 emb=0.552 ph=0.474 im=0.374\n", + " 9. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Buildings.sprite3\\80b120b7152ed72fded84fef485f4f79.png harm=0.4531 emb=0.474 ph=0.474 im=0.417\n", + " 10. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Button1.sprite3\\21fb7fa07eac4794fded0be4e18e20a2.png harm=0.4506 emb=0.322 ph=0.632 im=0.507\n", + "\n", + "Consensus (in top-10 of ALL metrics): ['D:\\\\DEV PATEL\\\\2025\\\\scratch_VLM\\\\scratch_agent\\\\blocks\\\\code_blocks\\\\white2.jpg']\n", + "\n", + "FINAL selected candidate: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white2.jpg\n" + ] + } + ], + "source": [ + "# --------------------------\n", + "# Choose best candidate helper\n", + "# --------------------------\n", + "from collections import defaultdict\n", + "import math\n", + "\n", + "def choose_top_candidates(embedding_results, phash_results, imgmatch_results, top_k=10,\n", + " method_weights=(0.5, 0.3, 0.2), verbose=True):\n", + " \"\"\"\n", + " embedding_results: list of (path, emb_sim) where emb_sim roughly in [-1,1] (we'll clamp to 0..1)\n", + " phash_results: list of (path, hamming, ph_sim) where ph_sim in [0,1]\n", + " imgmatch_results: list of (path, dist, im_sim) where im_sim in [0,1]\n", + " method_weights: weights for (emb, phash, imgmatch) when using weighted average\n", + " returns dict with top candidates from three methods and diagnostics\n", + " \"\"\"\n", + " # Build dicts for quick lookup\n", + " emb_map = {p: float(s) for p, s in embedding_results}\n", + " ph_map = {p: float(sim) for p, _, sim in phash_results}\n", + " im_map = {p: float(sim) for p, _, sim in imgmatch_results}\n", + "\n", + " # Universe of candidates (union)\n", + " all_paths = sorted(set(list(emb_map.keys()) + list(ph_map.keys()) + list(im_map.keys())))\n", + "\n", + " # --- Normalize each metric across candidates to [0,1] ---\n", + " def normalize_map(m):\n", + " vals = [m.get(p, None) for p in all_paths]\n", + " # treat missing as None\n", + " present = [v for v in vals if v is not None and not math.isnan(v)]\n", + " if not present:\n", + " return {p: 0.0 for p in all_paths}\n", + " vmin, vmax = min(present), max(present)\n", + " if vmax == vmin:\n", + " # constant -> map present values to 1.0, missing to 0\n", + " return {p: (1.0 if (m.get(p, None) is not None) else 0.0) for p in all_paths}\n", + " norm = {}\n", + " for p in all_paths:\n", + " v = m.get(p, None)\n", + " if v is None or math.isnan(v):\n", + " norm[p] = 0.0\n", + " else:\n", + " norm[p] = (v - vmin) / (vmax - vmin)\n", + " # clamp\n", + " if norm[p] < 0: norm[p] = 0.0\n", + " if norm[p] > 1: norm[p] = 1.0\n", + " return norm\n", + "\n", + " # For embeddings, clamp negatives to 0 first (optional)\n", + " emb_map_clamped = {}\n", + " for p, v in emb_map.items():\n", + " # common approach: embeddings are cosine in [-1,1]; clamp negatives to 0 to 
treat as no-sim\n", + " emb_map_clamped[p] = max(0.0, v)\n", + "\n", + " emb_norm = normalize_map(emb_map_clamped)\n", + " ph_norm = normalize_map(ph_map)\n", + " im_norm = normalize_map(im_map)\n", + "\n", + " # --- Method A: Normalized weighted average ---\n", + " w_emb, w_ph, w_im = method_weights\n", + " weighted_scores = {}\n", + " for p in all_paths:\n", + " weighted_scores[p] = (w_emb * emb_norm.get(p, 0.0)\n", + " + w_ph * ph_norm.get(p, 0.0)\n", + " + w_im * im_norm.get(p, 0.0))\n", + "\n", + " top_weighted = sorted(weighted_scores.items(), key=lambda x: x[1], reverse=True)[:top_k]\n", + "\n", + " # --- Method B: Rank-sum (Borda) ---\n", + " # compute ranks per metric (higher value => better rank 1)\n", + " def ranks_from_map(m_norm):\n", + " # bigger is better\n", + " items = sorted(m_norm.items(), key=lambda x: x[1], reverse=True)\n", + " ranks = {}\n", + " for i, (p, _) in enumerate(items):\n", + " ranks[p] = i + 1 # 1-based\n", + " # missing entries get worst rank (len+1)\n", + " worst = len(items) + 1\n", + " for p in all_paths:\n", + " if p not in ranks:\n", + " ranks[p] = worst\n", + " return ranks\n", + "\n", + " rank_emb = ranks_from_map(emb_norm)\n", + " rank_ph = ranks_from_map(ph_norm)\n", + " rank_im = ranks_from_map(im_norm)\n", + "\n", + " rank_sum = {}\n", + " for p in all_paths:\n", + " rank_sum[p] = rank_emb.get(p, 9999) + rank_ph.get(p, 9999) + rank_im.get(p, 9999)\n", + " top_rank_sum = sorted(rank_sum.items(), key=lambda x: x[1])[:top_k] # smaller is better\n", + "\n", + " # --- Method C: Harmonic mean of the normalized scores (penalizes missing/low values) ---\n", + " harm_scores = {}\n", + " for p in all_paths:\n", + " a = emb_norm.get(p, 0.0)\n", + " b = ph_norm.get(p, 0.0)\n", + " c = im_norm.get(p, 0.0)\n", + " # avoid zeros -> harmonic is defined for positive values, but we want to allow zero => it will be 0\n", + " if a + b + c == 0:\n", + " harm = 0.0\n", + " else:\n", + " # harmonic mean for three values: 3 / (1/a + 1/b + 1/c), but if any is zero, result is 0\n", + " if a == 0 or b == 0 or c == 0:\n", + " harm = 0.0\n", + " else:\n", + " harm = 3.0 / ((1.0/a) + (1.0/b) + (1.0/c))\n", + " harm_scores[p] = harm\n", + " top_harm = sorted(harm_scores.items(), key=lambda x: x[1], reverse=True)[:top_k]\n", + "\n", + " # --- Consensus set: items that appear in top-K of each metric individually ---\n", + " def topk_set_by_map(m_norm, k=top_k):\n", + " return set([p for p,_ in sorted(m_norm.items(), key=lambda x: x[1], reverse=True)[:k]])\n", + " cons_set = topk_set_by_map(emb_norm, top_k) & topk_set_by_map(ph_norm, top_k) & topk_set_by_map(im_norm, top_k)\n", + "\n", + " # Build readable outputs\n", + " result = {\n", + " \"emb_norm\": emb_norm,\n", + " \"ph_norm\": ph_norm,\n", + " \"im_norm\": im_norm,\n", + " \"weighted_topk\": top_weighted,\n", + " \"rank_sum_topk\": top_rank_sum,\n", + " \"harmonic_topk\": top_harm,\n", + " \"consensus_topk\": list(cons_set),\n", + " \"weighted_scores_full\": weighted_scores,\n", + " \"rank_sum_full\": rank_sum,\n", + " \"harmonic_full\": harm_scores\n", + " }\n", + "\n", + " if verbose:\n", + " print(\"\\nTop by Weighted Normalized Average (weights emb,ph,img = {:.2f},{:.2f},{:.2f}):\".format(w_emb, w_ph, w_im))\n", + " for i,(p,s) in enumerate(result[\"weighted_topk\"], start=1):\n", + " print(f\" {i}. 
{p} score={s:.4f} emb={emb_norm.get(p,0):.3f} ph={ph_norm.get(p,0):.3f} im={im_norm.get(p,0):.3f}\")\n", + "\n", + " print(\"\\nTop by Rank-sum (lower is better):\")\n", + " for i,(p,s) in enumerate(result[\"rank_sum_topk\"], start=1):\n", + " print(f\" {i}. {p} rank_sum={s} emb_rank={rank_emb.get(p)} ph_rank={rank_ph.get(p)} img_rank={rank_im.get(p)}\")\n", + "\n", + " print(\"\\nTop by Harmonic mean (requires non-zero on all metrics):\")\n", + " for i,(p,s) in enumerate(result[\"harmonic_topk\"], start=1):\n", + " print(f\" {i}. {p} harm={s:.4f} emb={emb_norm.get(p,0):.3f} ph={ph_norm.get(p,0):.3f} im={im_norm.get(p,0):.3f}\")\n", + "\n", + " print(\"\\nConsensus (in top-{0} of ALL metrics): {1}\".format(top_k, result[\"consensus_topk\"]))\n", + "\n", + " return result\n", + "\n", + "# -----------------------\n", + "# Example usage with your existing variables\n", + "# -----------------------\n", + "# You said you already produced these lists earlier in the script:\n", + "# embedding_results = [(path, emb_sim), ...]\n", + "# phash_results_sorted = [(path, hamming, ph_sim), ...]\n", + "# imgmatch_results_sorted = [(path, dist, im_sim), ...]\n", + "\n", + "# If variable names differ, adjust them accordingly.\n", + "\n", + "result_pick = choose_top_candidates(embedding_results, phash_results_sorted, imgmatch_results_sorted,\n", + " top_k=10, method_weights=(0.5,0.3,0.2), verbose=True)\n", + "\n", + "# Pick final candidate: prefer weighted_topk[0]; if consensus non-empty, prefer consensus highest-weighted among consensus\n", + "final = None\n", + "if len(result_pick[\"consensus_topk\"]) > 0:\n", + " # choose best-weighted among consensus\n", + " consensus = result_pick[\"consensus_topk\"]\n", + " best = max(consensus, key=lambda p: result_pick[\"weighted_scores_full\"].get(p, 0.0))\n", + " final = best\n", + "else:\n", + " final = result_pick[\"weighted_topk\"][0][0] if result_pick[\"weighted_topk\"] else None\n", + "\n", + "print(f\"\\nFINAL selected candidate: {final}\")\n" + ] + }, + { + "cell_type": "markdown", + "id": "4fe31d55", + "metadata": {}, + "source": [ + "## Flow for the app prod" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "330d8265", + "metadata": {}, + "outputs": [], + "source": [ + "import cv2, json,base64,io,os,tempfile,logging, re\n", + "import numpy as np\n", + "from unstructured.partition.pdf import partition_pdf\n", + "from PIL import Image\n", + "# from imutils.perspective import four_point_transform\n", + "from dotenv import load_dotenv\n", + "import pytesseract\n", + "# from werkzeug.utils import secure_filename\n", + "# from langchain_groq import ChatGroq\n", + "# from langgraph.prebuilt import create_react_agent\n", + "from pdf2image import convert_from_path, convert_from_bytes\n", + "from concurrent.futures import ThreadPoolExecutor\n", + "from pdf2image.exceptions import PDFInfoNotInstalledError\n", + "from typing import Dict, TypedDict, Optional, Any\n", + "from langgraph.graph import StateGraph, END\n", + "import uuid\n", + "import shutil, time, functools\n", + "# from langchain_experimental.open_clip.open_clip import OpenCLIPEmbeddings\n", + "# from langchain_core.utils.utils import secret_from_env\n", + "# from matplotlib.offsetbox import OffsetImage, AnnotationBbox\n", + "from io import BytesIO\n", + "from pathlib import Path\n", + "import os\n", + "#from utils.block_relation_builder import block_builder, separate_scripts, transform_logic_to_action_flow, analyze_opcode_counts\n", + "# from langchain.chat_models import ChatOpenAI\n", 
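+ "# note (added comment): the langchain_openai ChatOpenAI import below supersedes the deprecated langchain.chat_models one above; both are kept commented out for reference\n",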
+ "# from langchain_openai import ChatOpenAI\n", + "from pydantic import Field, SecretStr\n", + "from difflib import get_close_matches\n", + "# pytesseract.pytesseract.tesseract_cmd = (r'/usr/bin/tesseract')\n", + "import os\n", + "import pytesseract\n", + "\n", + "# Explicitly set the path to the Tesseract executable\n", + "# Use a raw string (r'...') to handle backslashes correctly\n", + "pytesseract.pytesseract.tesseract_cmd = r'C:\\Program Files\\Tesseract-OCR\\tesseract.exe'\n", + "\n", + "# Explicitly set the path to the Tesseract language data files\n", + "# This is the path to the 'tessdata' folder itself\n", + "os.environ['TESSDATA_PREFIX'] = r'C:\\Program Files\\Tesseract-OCR\\tessdata'" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "d4fc87dc", + "metadata": {}, + "outputs": [], + "source": [ + "# Replace this with the actual path to your PDF file\n", + "pdf_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\test_samp\\e34e5722e8cc42a89b6501c352b9aa0a.pdf\"\n", + " \n", + "# Open and read the PDF file as bytes, then wrap it in a BytesIO stream\n", + "with open(pdf_path, \"rb\") as pdf_file:\n", + " pdf_bytes = pdf_file.read()\n", + " pdf_stream = io.BytesIO(pdf_bytes) " + ] + }, + { + "cell_type": "markdown", + "id": "bb1c7aac", + "metadata": {}, + "source": [ + "## similarity flow" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a5c4163d", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Initializing models and helpers...\n", + "DINOv2 loaded on cpu. PHash & ImageSignature ready.\n" + ] + } + ], + "source": [ + "import os\n", + "import json\n", + "import numpy as np\n", + "import torch\n", + "from PIL import Image, ImageOps, ImageEnhance\n", + "from imagededup.methods import PHash\n", + "from transformers import AutoImageProcessor, AutoModel\n", + "import io\n", + "import base64\n", + "from pathlib import Path\n", + "import cv2\n", + "# hashing & image-match\n", + "from image_match.goldberg import ImageSignature\n", + "import sys\n", + "import math\n", + "import hashlib\n", + "from typing import List, Tuple\n", + "\n", + "\n", + "# Query image (adjust path as needed)\n", + "# QUERY_PATH = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\test_samp\\image.jpg\"\n", + "\n", + "# DINOv2 model id\n", + "DINOV2_MODEL = \"facebook/dinov2-small\"\n", + "\n", + "# For PHash normalization when combining scores: assumed max hamming bits (typical phash=64)\n", + "MAX_PHASH_BITS = 64\n", + "\n", + "# -----------------------\n", + "# INITIALIZE MODELS\n", + "# -----------------------\n", + "print(\"Initializing models and helpers...\")\n", + "DEVICE = torch.device(\"cuda\" if torch.cuda.is_available() else \"cpu\")\n", + "if DEVICE.type == \"cpu\":\n", + " torch.set_num_threads(4)\n", + "\n", + "dinov2_processor = AutoImageProcessor.from_pretrained(DINOV2_MODEL)\n", + "dinov2_model = AutoModel.from_pretrained(DINOV2_MODEL)\n", + "dinov2_model.to(DEVICE)\n", + "dinov2_model.eval()\n", + "\n", + "phash = PHash()\n", + "gis = ImageSignature()\n", + "\n", + "print(f\"DINOv2 loaded on {DEVICE}. 
PHash & ImageSignature ready.\")\n", + "\n", + "# -----------------------\n", + "# SERIALIZABLE HELPER\n", + "# -----------------------\n", + "def make_json_serializable(obj):\n", + " \"\"\"Recursively convert numpy and other objects into JSON-serializable types.\"\"\"\n", + " if obj is None:\n", + " return None\n", + " if isinstance(obj, (str, int, float, bool)):\n", + " return obj\n", + " if isinstance(obj, np.ndarray):\n", + " return obj.tolist()\n", + " if isinstance(obj, dict):\n", + " return {str(k): make_json_serializable(v) for k, v in obj.items()}\n", + " if isinstance(obj, (list, tuple)):\n", + " return [make_json_serializable(v) for v in obj]\n", + " # some image-match signatures may contain numpy, so try .tolist\n", + " try:\n", + " return obj.tolist()\n", + " except Exception:\n", + " pass\n", + " # fallback to string\n", + " return str(obj)\n", + "\n", + "# -----------------------\n", + "# BASE64 <-> PIL\n", + "# -----------------------\n", + "def pil_to_base64(pil_img, fmt=\"PNG\"):\n", + " buffer = io.BytesIO()\n", + " pil_img.save(buffer, format=fmt)\n", + " return base64.b64encode(buffer.getvalue()).decode(\"utf-8\")\n", + "\n", + "def base64_to_pil(b64):\n", + " try:\n", + " data = base64.b64decode(b64)\n", + " return Image.open(io.BytesIO(data))\n", + " except Exception as e:\n", + " print(f\"[base64_to_pil] Error: {e}\")\n", + " return None\n", + "\n", + "# -----------------------\n", + "# PIL helpers\n", + "# -----------------------\n", + "def load_image_pil(path):\n", + " try:\n", + " return Image.open(path)\n", + " except Exception as e:\n", + " print(f\"[load_image_pil] Could not open {path}: {e}\")\n", + " return None\n", + "\n", + "def add_background(pil_img, bg_color=(255,255,255), size=None):\n", + " if pil_img is None:\n", + " return None\n", + " try:\n", + " target = size if size is not None else pil_img.size\n", + " bg = Image.new(\"RGB\", target, bg_color)\n", + " img_rgba = pil_img.convert(\"RGBA\")\n", + " if img_rgba.size != target:\n", + " x = (target[0] - img_rgba.size[0]) // 2\n", + " y = (target[1] - img_rgba.size[1]) // 2\n", + " else:\n", + " x, y = 0, 0\n", + " mask = img_rgba.split()[3] if img_rgba.mode == \"RGBA\" else None\n", + " bg.paste(img_rgba.convert(\"RGB\"), (x,y), mask=mask)\n", + " return bg\n", + " except Exception as e:\n", + " print(f\"[add_background] Error: {e}\")\n", + " return None\n", + "\n", + "def preprocess_for_hash(pil_img, size=(256,256)):\n", + " try:\n", + " img = pil_img.convert(\"RGB\")\n", + " img = ImageOps.grayscale(img)\n", + " img = ImageOps.equalize(img)\n", + " img = img.resize(size)\n", + " return np.array(img).astype(np.uint8)\n", + " except Exception as e:\n", + " print(f\"[preprocess_for_hash] Error: {e}\")\n", + " return None\n", + "\n", + "def preprocess_for_model(pil_img):\n", + " try:\n", + " if pil_img.mode == \"RGBA\":\n", + " pil_img = pil_img.convert(\"RGB\")\n", + " elif pil_img.mode == \"L\":\n", + " pil_img = pil_img.convert(\"RGB\")\n", + " else:\n", + " pil_img = pil_img.convert(\"RGB\")\n", + " return pil_img\n", + " except Exception as e:\n", + " print(f\"[preprocess_for_model] Error: {e}\")\n", + " return None\n", + "\n", + "def get_dinov2_embedding_from_pil(pil_img):\n", + " try:\n", + " if pil_img is None:\n", + " return None\n", + " inputs = dinov2_processor(images=pil_img, return_tensors=\"pt\").to(DEVICE)\n", + " with torch.no_grad():\n", + " outputs = dinov2_model(**inputs)\n", + " # CLS token embedding\n", + " emb = outputs.last_hidden_state[:,0,:].squeeze(0).cpu().numpy()\n", + " n = 
np.linalg.norm(emb)\n", + " if n == 0 or np.isnan(n):\n", + " return None\n", + " return (emb / n).astype(float)\n", + " except Exception as e:\n", + " print(f\"[get_dinov2_embedding_from_pil] Error: {e}\")\n", + " return None\n", + "\n", + "# -----------------------\n", + "# OpenCV enhancement (accepts PIL)\n", + "# -----------------------\n", + "def pil_to_bgr_np(pil_img):\n", + " arr = np.array(pil_img.convert(\"RGB\"))\n", + " return cv2.cvtColor(arr, cv2.COLOR_RGB2BGR)\n", + "\n", + "def bgr_np_to_pil(bgr_np):\n", + " rgb = cv2.cvtColor(bgr_np, cv2.COLOR_BGR2RGB)\n", + " return Image.fromarray(rgb)\n", + "\n", + "def upscale_image_cv(bgr_np, scale=2):\n", + " h,w = bgr_np.shape[:2]\n", + " return cv2.resize(bgr_np, (w*scale, h*scale), interpolation=cv2.INTER_CUBIC)\n", + "\n", + "def reduce_noise_cv(bgr_np):\n", + " return cv2.fastNlMeansDenoisingColored(bgr_np, None, 10,10,7,21)\n", + "\n", + "def sharpen_cv(bgr_np):\n", + " kernel = np.array([[0,-1,0],[-1,5,-1],[0,-1,0]])\n", + " return cv2.filter2D(bgr_np, -1, kernel)\n", + "\n", + "def enhance_contrast_cv(bgr_np):\n", + " pil_img = Image.fromarray(cv2.cvtColor(bgr_np, cv2.COLOR_BGR2RGB))\n", + " enhancer = ImageEnhance.Contrast(pil_img)\n", + " enhanced = enhancer.enhance(1.5)\n", + " return cv2.cvtColor(np.array(enhanced), cv2.COLOR_RGB2BGR)\n", + "\n", + "def process_image_cv2_from_pil(pil_img, scale=2):\n", + " try:\n", + " bgr = pil_to_bgr_np(pil_img)\n", + " bgr = upscale_image_cv(bgr, scale=scale) if scale != 1 else bgr\n", + " bgr = reduce_noise_cv(bgr)\n", + " bgr = sharpen_cv(bgr)\n", + " bgr = enhance_contrast_cv(bgr)\n", + " return bgr_np_to_pil(bgr)\n", + " except Exception as e:\n", + " print(f\"[process_image_cv2_from_pil] Error: {e}\")\n", + " return None\n", + " \n", + " \n", + "\n", + "# # Phash wrapper providing a small consistent API used in the script\n", + "# class PhashWrapper:\n", + "# def __init__(self):\n", + "# self.have_imagehash = _imagehash is not None\n", + "\n", + "# def encode_image(self, image_array_or_pil):\n", + "# # Accept either a PIL image or numpy array\n", + "# if image_array_or_pil is None:\n", + "# return None\n", + "# try:\n", + "# if isinstance(image_array_or_pil, np.ndarray):\n", + "# im = Image.fromarray(image_array_or_pil)\n", + "# else:\n", + "# im = image_array_or_pil\n", + "# if self.have_imagehash:\n", + "# h = _imagehash.phash(im)\n", + "# return str(h) # hex string\n", + "# else:\n", + "# # fallback: simple average-hash like behavior using downsampled pixels\n", + "# small = im.convert(\"L\").resize((8, 8), Image.BICUBIC)\n", + "# arr = np.asarray(small).astype(np.float32)\n", + "# med = np.median(arr)\n", + "# bits = (arr > med).flatten().astype(int)\n", + "# # encode to hex-like string\n", + "# val = 0\n", + "# for b in bits:\n", + "# val = (val << 1) | int(b)\n", + "# return format(val, \"016x\")\n", + "# except Exception:\n", + "# return None\n", + "\n", + "# def hex_to_hash(self, hexstr: str):\n", + "# # For compatibility with imagehash.ImageHash, we just return the hex string\n", + "# return hexstr\n", + "\n", + "# def hamming_distance(self, a_hex, b_hex):\n", + "# if a_hex is None or b_hex is None:\n", + "# return None\n", + "# try:\n", + "# # convert hex strings to ints and xor\n", + "# ai = int(str(a_hex), 16)\n", + "# bi = int(str(b_hex), 16)\n", + "# x = ai ^ bi\n", + "# # popcount\n", + "# return x.bit_count()\n", + "# except Exception:\n", + "# # fall back to string comparison\n", + "# s1 = str(a_hex)\n", + "# s2 = str(b_hex)\n", + "# # pad to same length with 
zeros\n", + "# L = max(len(s1), len(s2))\n", + "# s1 = s1.rjust(L, \"0\")\n", + "# s2 = s2.rjust(L, \"0\")\n", + "# hd = sum(ch1 != ch2 for ch1, ch2 in zip(s1, s2))\n", + "# return hd\n", + "\n", + "\n", + "# # Signature wrapper fallback (use ImageSignature if available)\n", + "# class SignatureWrapper:\n", + "# def __init__(self):\n", + "# if _ImageSignature is not None:\n", + "# self._impl = _ImageSignature()\n", + "# else:\n", + "# self._impl = None\n", + "\n", + "# def generate_signature(self, path_or_pil):\n", + "# # If ImageSignature available, accept path\n", + "# if self._impl is not None and isinstance(path_or_pil, str):\n", + "# return self._impl.generate_signature(path_or_pil)\n", + "# # fallback: create a small descriptor (flattened small grayscale array)\n", + "# try:\n", + "# if isinstance(path_or_pil, str):\n", + "# im = Image.open(path_or_pil).convert(\"L\")\n", + "# else:\n", + "# im = path_or_pil.convert(\"L\")\n", + "# small = im.resize((16, 16), Image.BICUBIC)\n", + "# arr = np.asarray(small).astype(np.float32).flatten()\n", + "# # normalize [0,1]\n", + "# if arr.max() > 0:\n", + "# arr = arr / 255.0\n", + "# return arr.tolist()\n", + "# except Exception:\n", + "# return None\n", + "\n", + "# def normalized_distance(self, a, b):\n", + "# # If using real ImageSignature impl\n", + "# if self._impl is not None:\n", + "# return self._impl.normalized_distance(a, b)\n", + "# # else, both a and b are lists/arrays\n", + "# try:\n", + "# aa = np.asarray(a, dtype=float).flatten()\n", + "# bb = np.asarray(b, dtype=float).flatten()\n", + "# # pad to same length\n", + "# L = max(len(aa), len(bb))\n", + "# if len(aa) < L:\n", + "# aa = np.pad(aa, (0, L - len(aa)))\n", + "# if len(bb) < L:\n", + "# bb = np.pad(bb, (0, L - len(bb)))\n", + "# # normalized L2 distance in [0,1] (approx): divide by sqrt(L) to normalize\n", + "# dist = np.linalg.norm(aa - bb) / math.sqrt(L)\n", + "# # clamp to [0,1]\n", + "# return float(min(1.0, max(0.0, dist)))\n", + "# except Exception:\n", + "# return 1.0\n", + "\n", + "\n", + "# # Instantiate shared wrappers\n", + "# _ph = PhashWrapper()\n", + "# _sig = SignatureWrapper()\n", + "\n", + "\n", + "# ------------------------\n", + "# choose_top_candidates (as provided earlier, slightly cleaned)\n", + "# ------------------------\n", + "from collections import defaultdict\n", + "\n", + "\n", + "import os\n", + "import numpy as np\n", + "from typing import Dict, Any, Tuple, List, Optional\n", + "\n", + "def run_query_search_flow(\n", + " query_path: Optional[str] = None,\n", + " query_b64: Optional[str] = None,\n", + " processed_dir: str = \"./processed\",\n", + " embeddings_dict: Dict[str, np.ndarray] = None,\n", + " hash_dict: Dict[str, Any] = None,\n", + " signature_obj_map: Dict[str, Any] = None,\n", + " gis: Any = None,\n", + " phash: Any = None,\n", + " MAX_PHASH_BITS: int = 64,\n", + " k: int = 10,\n", + ") -> Tuple[\n", + " List[Tuple[str, float]],\n", + " List[Tuple[str, Any, float]],\n", + " List[Tuple[str, Any, float]],\n", + " List[Tuple[str, float, float, float, float]],\n", + "]:\n", + " \"\"\"\n", + " Run the full query/search flow (base64 -> preprocess -> embed -> scoring).\n", + " Accepts either query_path (file on disk) OR query_b64 (base64 string). 
If both are\n", + " provided, query_b64 takes precedence.\n", + "\n", + " Returns:\n", + " embedding_results_sorted,\n", + " phash_results_sorted,\n", + " imgmatch_results_sorted,\n", + " combined_results_sorted\n", + " \"\"\"\n", + "\n", + " # Validate inputs\n", + " if (query_path is None or query_path == \"\") and (query_b64 is None or query_b64 == \"\"):\n", + " raise ValueError(\"Either query_path or query_b64 must be provided.\")\n", + "\n", + " # Ensure processed_dir exists\n", + " os.makedirs(processed_dir, exist_ok=True)\n", + "\n", + " print(\"\\n--- Query/Search Phase ---\")\n", + "\n", + " # 1) Load query image (prefer base64 if provided)\n", + " if query_b64:\n", + " # base64 provided directly -> decode to PIL\n", + " query_from_b64 = base64_to_pil(query_b64)\n", + " if query_from_b64 is None:\n", + " raise RuntimeError(\"Could not decode provided base64 query. Exiting.\")\n", + " query_pil_orig = query_from_b64\n", + " else:\n", + " # load from disk\n", + " if not os.path.exists(query_path):\n", + " raise FileNotFoundError(f\"Query image not found: {query_path}\")\n", + " query_pil_orig = load_image_pil(query_path)\n", + " if query_pil_orig is None:\n", + " raise RuntimeError(\"Could not load query image from path. Exiting.\")\n", + "\n", + " # also create a base64 roundtrip for robustness (keep original behaviour)\n", + " try:\n", + " query_b64 = pil_to_base64(query_pil_orig, fmt=\"PNG\")\n", + " except Exception as e:\n", + " raise RuntimeError(f\"Could not base64 query from disk image: {e}\")\n", + " # keep decoded copy for consistency\n", + " query_from_b64 = base64_to_pil(query_b64)\n", + " if query_from_b64 is None:\n", + " raise RuntimeError(\"Could not decode query base64 after roundtrip. Exiting.\")\n", + "\n", + " # At this point, query_from_b64 is a PIL.Image we can continue with\n", + " # 2) Preprocess with OpenCV enhancement (best-effort; fallback to base64-decoded image)\n", + " enhanced_query_pil = process_image_cv2_from_pil(query_from_b64, scale=2)\n", + " if enhanced_query_pil is None:\n", + " print(\"[Query] OpenCV enhancement failed; falling back to base64-decoded image.\")\n", + " enhanced_query_pil = query_from_b64\n", + "\n", + " # Save the enhanced query (best-effort)\n", + " query_enhanced_path = os.path.join(processed_dir, \"query_enhanced.png\")\n", + " try:\n", + " enhanced_query_pil.save(query_enhanced_path, format=\"PNG\")\n", + " except Exception:\n", + " try:\n", + " enhanced_query_pil.convert(\"RGB\").save(query_enhanced_path, format=\"PNG\")\n", + " except Exception:\n", + " print(\"[Warning] Could not save enhanced query image for inspection.\")\n", + "\n", + " # 3) Query embedding (preprocess -> model)\n", + " prepped = preprocess_for_model(enhanced_query_pil)\n", + " query_emb = get_dinov2_embedding_from_pil(prepped)\n", + " if query_emb is None:\n", + " raise RuntimeError(\"Could not compute query embedding. Exiting.\")\n", + "\n", + " # 4) Query phash computation\n", + " query_hash_arr = preprocess_for_hash(enhanced_query_pil)\n", + " if query_hash_arr is None:\n", + " raise RuntimeError(\"Could not compute query phash array. 
Exiting.\")\n", + " query_phash = phash.encode_image(image_array=query_hash_arr)\n", + "\n", + " # 5) Query signature generation (best-effort)\n", + " query_sig = None\n", + " query_sig_path = os.path.join(processed_dir, \"query_for_sig.png\")\n", + " try:\n", + " enhanced_query_pil.save(query_sig_path, format=\"PNG\")\n", + " except Exception:\n", + " try:\n", + " enhanced_query_pil.convert(\"RGB\").save(query_sig_path, format=\"PNG\")\n", + " except Exception:\n", + " query_sig_path = None\n", + "\n", + " if query_sig_path:\n", + " try:\n", + " query_sig = gis.generate_signature(query_sig_path)\n", + " except Exception as e:\n", + " print(f\"[ImageSignature] failed for query: {e}\")\n", + " query_sig = None\n", + "\n", + " # -----------------------\n", + " # Prepare stored data arrays\n", + " # -----------------------\n", + " embeddings_dict = embeddings_dict or {}\n", + " hash_dict = hash_dict or {}\n", + " signature_obj_map = signature_obj_map or {}\n", + "\n", + " image_paths = list(embeddings_dict.keys())\n", + " image_embeddings = np.array(list(embeddings_dict.values()), dtype=float) if embeddings_dict else np.array([])\n", + "\n", + " def cosine_similarity(a: np.ndarray, b: np.ndarray) -> float:\n", + " try:\n", + " return float(np.dot(a, b))\n", + " except Exception:\n", + " return -1.0\n", + "\n", + " # Collections\n", + " embedding_results: List[Tuple[str, float]] = []\n", + " phash_results: List[Tuple[str, Any, float]] = []\n", + " imgmatch_results: List[Tuple[str, Any, float]] = []\n", + " combined_results: List[Tuple[str, float, float, float, float]] = []\n", + "\n", + " # Iterate stored images and compute similarities\n", + " for idx, path in enumerate(image_paths):\n", + " # Embedding similarity\n", + " try:\n", + " stored_emb = image_embeddings[idx]\n", + " emb_sim = cosine_similarity(query_emb, stored_emb)\n", + " except Exception:\n", + " emb_sim = -1.0\n", + " embedding_results.append((path, emb_sim))\n", + "\n", + " # PHash similarity (Hamming -> normalized sim)\n", + " try:\n", + " stored_ph = hash_dict.get(path)\n", + " if stored_ph is not None:\n", + " hd = phash.hamming_distance(query_phash, stored_ph)\n", + " ph_sim = max(0.0, 1.0 - (hd / float(MAX_PHASH_BITS)))\n", + " else:\n", + " hd = None\n", + " ph_sim = 0.0\n", + " except Exception:\n", + " hd = None\n", + " ph_sim = 0.0\n", + " phash_results.append((path, hd, ph_sim))\n", + "\n", + " # Image signature similarity (normalized distance -> similarity)\n", + " try:\n", + " stored_sig = signature_obj_map.get(path)\n", + " if stored_sig is not None and query_sig is not None:\n", + " dist = gis.normalized_distance(stored_sig, query_sig)\n", + " im_sim = max(0.0, 1.0 - dist)\n", + " else:\n", + " dist = None\n", + " im_sim = 0.0\n", + " except Exception:\n", + " dist = None\n", + " im_sim = 0.0\n", + " imgmatch_results.append((path, dist, im_sim))\n", + "\n", + " # Combined score: average of the three (embedding is clamped into [0,1])\n", + " emb_clamped = max(0.0, min(1.0, emb_sim))\n", + " combined = (emb_clamped + ph_sim + im_sim) / 3.0\n", + " combined_results.append((path, combined, emb_clamped, ph_sim, im_sim))\n", + "\n", + " # -----------------------\n", + " # Sort results\n", + " # -----------------------\n", + " embedding_results.sort(key=lambda x: x[1], reverse=True)\n", + " phash_results_sorted = sorted(phash_results, key=lambda x: (x[2] is not None, x[2]), reverse=True)\n", + " imgmatch_results_sorted = sorted(imgmatch_results, key=lambda x: (x[2] is not None, x[2]), reverse=True)\n", + " 
combined_results.sort(key=lambda x: x[1], reverse=True)\n", + "\n", + " # -----------------------\n", + " # Print Top-K results\n", + " # -----------------------\n", + " print(\"\\nTop results by DINOv2 Embeddings:\")\n", + " for i, (path, score) in enumerate(embedding_results[:k], start=1):\n", + " print(f\"Rank {i}: {path} | Cosine: {score:.4f}\")\n", + "\n", + " print(\"\\nTop results by PHash (Hamming distance & normalized sim):\")\n", + " for i, (path, hd, sim) in enumerate(phash_results_sorted[:k], start=1):\n", + " print(f\"Rank {i}: {path} | Hamming: {hd} | NormSim: {sim:.4f}\")\n", + "\n", + " print(\"\\nTop results by ImageSignature (normalized similarity = 1 - distance):\")\n", + " for i, (path, dist, sim) in enumerate(imgmatch_results_sorted[:k], start=1):\n", + " print(f\"Rank {i}: {path} | NormDist: {dist} | NormSim: {sim:.4f}\")\n", + "\n", + " print(\"\\nTop results by Combined Score (avg of embedding|phash|image-match):\")\n", + " for i, (path, combined, emb_clamped, ph_sim, im_sim) in enumerate(combined_results[:k], start=1):\n", + " print(f\"Rank {i}: {path} | Combined: {combined:.4f} | emb: {emb_clamped:.4f} | phash_sim: {ph_sim:.4f} | imgmatch_sim: {im_sim:.4f}\")\n", + "\n", + " print(\"\\nSearch complete.\")\n", + "\n", + " # Return sorted lists for programmatic consumption\n", + " return embedding_results, phash_results_sorted, imgmatch_results_sorted, combined_results\n", + "\n", + "# --------------------------\n", + "# Choose best candidate helper\n", + "# --------------------------\n", + "from collections import defaultdict\n", + "import math\n", + "\n", + "def choose_top_candidates(embedding_results, phash_results, imgmatch_results, top_k=10,\n", + " method_weights=(0.5, 0.3, 0.2), verbose=True):\n", + " \"\"\"\n", + " embedding_results: list of (path, emb_sim) where emb_sim roughly in [-1,1] (we'll clamp to 0..1)\n", + " phash_results: list of (path, hamming, ph_sim) where ph_sim in [0,1]\n", + " imgmatch_results: list of (path, dist, im_sim) where im_sim in [0,1]\n", + " method_weights: weights for (emb, phash, imgmatch) when using weighted average\n", + " returns dict with top candidates from three methods and diagnostics\n", + " \"\"\"\n", + " # Build dicts for quick lookup\n", + " emb_map = {p: float(s) for p, s in embedding_results}\n", + " ph_map = {p: float(sim) for p, _, sim in phash_results}\n", + " im_map = {p: float(sim) for p, _, sim in imgmatch_results}\n", + "\n", + " # Universe of candidates (union)\n", + " all_paths = sorted(set(list(emb_map.keys()) + list(ph_map.keys()) + list(im_map.keys())))\n", + "\n", + " # --- Normalize each metric across candidates to [0,1] ---\n", + " def normalize_map(m):\n", + " vals = [m.get(p, None) for p in all_paths]\n", + " # treat missing as None\n", + " present = [v for v in vals if v is not None and not math.isnan(v)]\n", + " if not present:\n", + " return {p: 0.0 for p in all_paths}\n", + " vmin, vmax = min(present), max(present)\n", + " if vmax == vmin:\n", + " # constant -> map present values to 1.0, missing to 0\n", + " return {p: (1.0 if (m.get(p, None) is not None) else 0.0) for p in all_paths}\n", + " norm = {}\n", + " for p in all_paths:\n", + " v = m.get(p, None)\n", + " if v is None or math.isnan(v):\n", + " norm[p] = 0.0\n", + " else:\n", + " norm[p] = (v - vmin) / (vmax - vmin)\n", + " # clamp\n", + " if norm[p] < 0: norm[p] = 0.0\n", + " if norm[p] > 1: norm[p] = 1.0\n", + " return norm\n", + "\n", + " # For embeddings, clamp negatives to 0 first (optional)\n", + " emb_map_clamped = {}\n", + " for 
p, v in emb_map.items():\n", + " # common approach: embeddings are cosine in [-1,1]; clamp negatives to 0 to treat as no-sim\n", + " emb_map_clamped[p] = max(0.0, v)\n", + "\n", + " emb_norm = normalize_map(emb_map_clamped)\n", + " ph_norm = normalize_map(ph_map)\n", + " im_norm = normalize_map(im_map)\n", + "\n", + " # --- Method A: Normalized weighted average ---\n", + " w_emb, w_ph, w_im = method_weights\n", + " weighted_scores = {}\n", + " for p in all_paths:\n", + " weighted_scores[p] = (w_emb * emb_norm.get(p, 0.0)\n", + " + w_ph * ph_norm.get(p, 0.0)\n", + " + w_im * im_norm.get(p, 0.0))\n", + "\n", + " top_weighted = sorted(weighted_scores.items(), key=lambda x: x[1], reverse=True)[:top_k]\n", + "\n", + " # --- Method B: Rank-sum (Borda) ---\n", + " # compute ranks per metric (higher value => better rank 1)\n", + " def ranks_from_map(m_norm):\n", + " # bigger is better\n", + " items = sorted(m_norm.items(), key=lambda x: x[1], reverse=True)\n", + " ranks = {}\n", + " for i, (p, _) in enumerate(items):\n", + " ranks[p] = i + 1 # 1-based\n", + " # missing entries get worst rank (len+1)\n", + " worst = len(items) + 1\n", + " for p in all_paths:\n", + " if p not in ranks:\n", + " ranks[p] = worst\n", + " return ranks\n", + "\n", + " rank_emb = ranks_from_map(emb_norm)\n", + " rank_ph = ranks_from_map(ph_norm)\n", + " rank_im = ranks_from_map(im_norm)\n", + "\n", + " rank_sum = {}\n", + " for p in all_paths:\n", + " rank_sum[p] = rank_emb.get(p, 9999) + rank_ph.get(p, 9999) + rank_im.get(p, 9999)\n", + " top_rank_sum = sorted(rank_sum.items(), key=lambda x: x[1])[:top_k] # smaller is better\n", + "\n", + " # --- Method C: Harmonic mean of the normalized scores (penalizes missing/low values) ---\n", + " harm_scores = {}\n", + " for p in all_paths:\n", + " a = emb_norm.get(p, 0.0)\n", + " b = ph_norm.get(p, 0.0)\n", + " c = im_norm.get(p, 0.0)\n", + " # avoid zeros -> harmonic is defined for positive values, but we want to allow zero => it will be 0\n", + " if a + b + c == 0:\n", + " harm = 0.0\n", + " else:\n", + " # harmonic mean for three values: 3 / (1/a + 1/b + 1/c), but if any is zero, result is 0\n", + " if a == 0 or b == 0 or c == 0:\n", + " harm = 0.0\n", + " else:\n", + " harm = 3.0 / ((1.0/a) + (1.0/b) + (1.0/c))\n", + " harm_scores[p] = harm\n", + " top_harm = sorted(harm_scores.items(), key=lambda x: x[1], reverse=True)[:top_k]\n", + "\n", + " # --- Consensus set: items that appear in top-K of each metric individually ---\n", + " def topk_set_by_map(m_norm, k=top_k):\n", + " return set([p for p,_ in sorted(m_norm.items(), key=lambda x: x[1], reverse=True)[:k]])\n", + " cons_set = topk_set_by_map(emb_norm, top_k) & topk_set_by_map(ph_norm, top_k) & topk_set_by_map(im_norm, top_k)\n", + "\n", + " # Build readable outputs\n", + " result = {\n", + " \"emb_norm\": emb_norm,\n", + " \"ph_norm\": ph_norm,\n", + " \"im_norm\": im_norm,\n", + " \"weighted_topk\": top_weighted,\n", + " \"rank_sum_topk\": top_rank_sum,\n", + " \"harmonic_topk\": top_harm,\n", + " \"consensus_topk\": list(cons_set),\n", + " \"weighted_scores_full\": weighted_scores,\n", + " \"rank_sum_full\": rank_sum,\n", + " \"harmonic_full\": harm_scores\n", + " }\n", + "\n", + " if verbose:\n", + " print(\"\\nTop by Weighted Normalized Average (weights emb,ph,img = {:.2f},{:.2f},{:.2f}):\".format(w_emb, w_ph, w_im))\n", + " for i,(p,s) in enumerate(result[\"weighted_topk\"], start=1):\n", + " print(f\" {i}. 
{p} score={s:.4f} emb={emb_norm.get(p,0):.3f} ph={ph_norm.get(p,0):.3f} im={im_norm.get(p,0):.3f}\")\n", + "\n", + " print(\"\\nTop by Rank-sum (lower is better):\")\n", + " for i,(p,s) in enumerate(result[\"rank_sum_topk\"], start=1):\n", + " print(f\" {i}. {p} rank_sum={s} emb_rank={rank_emb.get(p)} ph_rank={rank_ph.get(p)} img_rank={rank_im.get(p)}\")\n", + "\n", + " print(\"\\nTop by Harmonic mean (requires non-zero on all metrics):\")\n", + " for i,(p,s) in enumerate(result[\"harmonic_topk\"], start=1):\n", + " print(f\" {i}. {p} harm={s:.4f} emb={emb_norm.get(p,0):.3f} ph={ph_norm.get(p,0):.3f} im={im_norm.get(p,0):.3f}\")\n", + "\n", + " print(\"\\nConsensus (in top-{0} of ALL metrics): {1}\".format(top_k, result[\"consensus_topk\"]))\n", + "\n", + " return result\n", + "\n", + "# -----------------------\n", + "# Example usage with your existing variables\n", + "# -----------------------\n", + "# You said you already produced these lists earlier in the script:\n", + "# embedding_results = [(path, emb_sim), ...]\n", + "# phash_results_sorted = [(path, hamming, ph_sim), ...]\n", + "# imgmatch_results_sorted = [(path, dist, im_sim), ...]\n", + "\n", + "# If variable names differ, adjust them accordingly.\n", + "\n", + "# result_pick = choose_top_candidates(embedding_results, phash_results_sorted, imgmatch_results_sorted,\n", + "# top_k=10, method_weights=(0.5,0.3,0.2), verbose=True)\n", + "\n", + "# # Pick final candidate: prefer weighted_topk[0]; if consensus non-empty, prefer consensus highest-weighted among consensus\n", + "# final = None\n", + "# if len(result_pick[\"consensus_topk\"]) > 0:\n", + "# # choose best-weighted among consensus\n", + "# consensus = result_pick[\"consensus_topk\"]\n", + "# best = max(consensus, key=lambda p: result_pick[\"weighted_scores_full\"].get(p, 0.0))\n", + "# final = best\n", + "# else:\n", + "# final = result_pick[\"weighted_topk\"][0][0] if result_pick[\"weighted_topk\"] else None\n", + "\n", + "# print(f\"\\nFINAL selected candidate: {final}\")\n", + "\n", + "\n", + "# -----------------------" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "36fc9c5c", + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "# def hybrid_similarity_matching(sprite_images_bytes: List[io.BytesIO], sprite_ids: List[str],\n", + "# out_path: str, min_similarity=None, top_k=5, method_weights=(0.5, 0.3, 0.2)):\n", + "# \"\"\"Patched hybrid similarity matching. 
Returns (indices_list, scores_list, paths_list, debug_info)\n", + "# - sprite_images_bytes: list of file-like objects (bytes) or PIL Images\n", + "# - sprite_ids: list of ids for debug prints\n", + "# - out_path: folder where embeddings, phash, signature files live\n", + "# \"\"\"\n", + "# embeddings_path = os.path.join(out_path, \"hybrid_embeddings.json\")\n", + "# hash_path = os.path.join(out_path, \"phash_data.json\")\n", + "# signature_path = os.path.join(out_path, \"signature_data.json\")\n", + "\n", + "# # Load embeddings file (flexible format)\n", + "# if not os.path.exists(embeddings_path):\n", + "# raise RuntimeError(f\"No embeddings file found at {embeddings_path}\")\n", + "# with open(embeddings_path, \"r\", encoding=\"utf-8\") as f:\n", + "# embedding_json = json.load(f)\n", + "\n", + "# # Load phash dict with normalized keys\n", + "# hash_dict = {}\n", + "# if os.path.exists(hash_path):\n", + "# try:\n", + "# with open(hash_path, \"r\", encoding=\"utf-8\") as f:\n", + "# hash_data = json.load(f)\n", + "# for p, hs in hash_data.items():\n", + "# try:\n", + "# hash_dict[_norm(p)] = _ph.hex_to_hash(hs)\n", + "# except Exception:\n", + "# pass\n", + "# except Exception as e:\n", + "# print(f\"Failed to load phash file: {e}\")\n", + "\n", + "# # Load signatures with normalized keys\n", + "# signature_dict = {}\n", + "# if os.path.exists(signature_path):\n", + "# try:\n", + "# with open(signature_path, \"r\", encoding=\"utf-8\") as f:\n", + "# sig_data = json.load(f)\n", + "# for p, s in sig_data.items():\n", + "# try:\n", + "# signature_dict[_norm(p)] = s\n", + "# except Exception:\n", + "# pass\n", + "# except Exception as e:\n", + "# print(f\"Failed to load signature file: {e}\")\n", + "\n", + "# # Parse embeddings into lists (normalize keys)\n", + "# paths_list = []\n", + "# embeddings_list = []\n", + "# if isinstance(embedding_json, dict):\n", + "# for p, emb in embedding_json.items():\n", + "# maybe_emb = None\n", + "# if isinstance(emb, dict):\n", + "# maybe_emb = emb.get(\"embedding\") or emb.get(\"embeddings\") or emb.get(\"emb\")\n", + "# elif isinstance(emb, list):\n", + "# maybe_emb = emb\n", + "# if maybe_emb is None:\n", + "# continue\n", + "# try:\n", + "# arr = np.asarray(maybe_emb, dtype=np.float32)\n", + "# except Exception:\n", + "# continue\n", + "# norm_p = _norm(p)\n", + "# paths_list.append(norm_p)\n", + "# embeddings_list.append(arr)\n", + "# elif isinstance(embedding_json, list):\n", + "# for item in embedding_json:\n", + "# if not isinstance(item, dict):\n", + "# continue\n", + "# p = item.get(\"path\") or item.get(\"image_path\") or item.get(\"file\") or item.get(\"filename\") or item.get(\"img_path\")\n", + "# emb = item.get(\"embeddings\") or item.get(\"embedding\") or item.get(\"features\") or item.get(\"vector\") or item.get(\"emb\")\n", + "# if p is None or emb is None:\n", + "# continue\n", + "# try:\n", + "# arr = np.asarray(emb, dtype=np.float32)\n", + "# except Exception:\n", + "# continue\n", + "# norm_p = _norm(p)\n", + "# paths_list.append(norm_p)\n", + "# embeddings_list.append(arr)\n", + "\n", + "# if len(paths_list) == 0:\n", + "# raise RuntimeError(\"No reference images/embeddings found in embeddings file\")\n", + "\n", + "# ref_matrix = np.vstack(embeddings_list).astype(np.float32)\n", + "\n", + "# # Prepare sprites: compute embedding/phash/signature\n", + "# sprite_emb_list = []\n", + "# sprite_phash_list = []\n", + "# sprite_sig_list = []\n", + "\n", + "# for i, item in enumerate(sprite_images_bytes):\n", + "# # accept either bytesIO or 
PIL Image\n", + "# if isinstance(item, Image.Image):\n", + "# sprite_pil = item\n", + "# else:\n", + "# try:\n", + "# item.seek(0)\n", + "# except Exception:\n", + "# pass\n", + "# try:\n", + "# sprite_pil = Image.open(item).convert(\"RGBA\")\n", + "# except Exception:\n", + "# sprite_pil = None\n", + "# enhanced_sprite = process_image_cv2_from_pil(sprite_pil, scale=2) or sprite_pil\n", + "\n", + "# sprite_emb = get_dinov2_embedding_from_pil(preprocess_for_model(enhanced_sprite))\n", + "# if sprite_emb is None:\n", + "# sprite_emb = np.zeros(ref_matrix.shape[1], dtype=np.float32)\n", + "# sprite_emb_list.append(sprite_emb)\n", + "\n", + "# sprite_hash_img = preprocess_for_hash(enhanced_sprite)\n", + "# sprite_phash = None\n", + "# if sprite_hash_img is not None:\n", + "# try:\n", + "# sprite_phash = _ph.encode_image(sprite_hash_img)\n", + "# except Exception:\n", + "# sprite_phash = None\n", + "# sprite_phash_list.append(sprite_phash)\n", + "\n", + "# sprite_sig = None\n", + "# try:\n", + "# temp_path = f\"temp_sprite_{i}.png\"\n", + "# enhanced_sprite.save(temp_path, format=\"PNG\")\n", + "# sprite_sig = _sig.generate_signature(temp_path)\n", + "# os.remove(temp_path)\n", + "# except Exception:\n", + "# sprite_sig = None\n", + "# sprite_sig_list.append(sprite_sig)\n", + "\n", + "# # Embedding similarity (safe normalization)\n", + "# sprite_emb_array = np.stack(sprite_emb_list).astype(np.float32)\n", + "# eps = 1e-10\n", + "# sprite_norms = np.linalg.norm(sprite_emb_array, axis=1, keepdims=True)\n", + "# sprite_norms[sprite_norms == 0] = eps\n", + "# sprite_emb_array_norm = sprite_emb_array / sprite_norms\n", + "\n", + "# ref_norms = np.linalg.norm(ref_matrix, axis=1, keepdims=True)\n", + "# ref_norms[ref_norms == 0] = eps\n", + "# ref_matrix_norm = ref_matrix / ref_norms\n", + "\n", + "# embedding_similarities = np.matmul(sprite_emb_array_norm, ref_matrix_norm.T)\n", + "# # cosine may produce [-1,1] — clamp negatives to 0 if you treat negative as no similarity\n", + "# embedding_similarities = np.clip(embedding_similarities, -1.0, 1.0)\n", + "\n", + "# per_sprite_final_indices = []\n", + "# per_sprite_final_scores = []\n", + "# per_sprite_rerank_debug = []\n", + "\n", + "# for i, sprite_id in enumerate(sprite_ids):\n", + "# embedding_results = [(paths_list[j], float(embedding_similarities[i, j])) for j in range(len(paths_list))]\n", + "# phash_results = []\n", + "# imgmatch_results = []\n", + "# sprite_phash = sprite_phash_list[i]\n", + "# sprite_sig = sprite_sig_list[i]\n", + "\n", + "# for j, ref_path in enumerate(paths_list):\n", + "# ph_sim = 0.0\n", + "# hd = None\n", + "# try:\n", + "# if sprite_phash is not None and ref_path in hash_dict:\n", + "# ref_hash = hash_dict[ref_path]\n", + "# hd = _ph.hamming_distance(sprite_phash, ref_hash)\n", + "# if hd is None:\n", + "# ph_sim = 0.0\n", + "# else:\n", + "# ph_sim = max(0.0, 1.0 - (hd / 64.0))\n", + "# except Exception:\n", + "# ph_sim = 0.0\n", + "# phash_results.append((ref_path, hd if hd is not None else 1000, ph_sim))\n", + "\n", + "# sig_sim = 0.0\n", + "# dist = None\n", + "# try:\n", + "# if sprite_sig is not None and ref_path in signature_dict:\n", + "# stored_sig = signature_dict[ref_path]\n", + "# dist = _sig.normalized_distance(stored_sig, sprite_sig)\n", + "# sig_sim = max(0.0, 1.0 - dist)\n", + "# except Exception:\n", + "# sig_sim = 0.0\n", + "# imgmatch_results.append((ref_path, dist if dist is not None else 1000, sig_sim))\n", + "\n", + "# # debug: top embedding-only candidate\n", + "# top_emb_idx = 
int(np.argmax(embedding_similarities[i]))\n", + "# top_emb_path = paths_list[top_emb_idx]\n", + "# print(f\"\\n[DEBUG] Sprite '{sprite_id}' - top embedding-only candidate: {top_emb_path} score={embedding_similarities[i, top_emb_idx]:.6f}\")\n", + "# print(f\"[DEBUG] phash present for that path? {top_emb_path in hash_dict}\")\n", + "# print(f\"[DEBUG] sig present for that path? {top_emb_path in signature_dict}\")\n", + "# print(f\"[DEBUG] any NaNs in embedding_similarities? {np.isnan(embedding_similarities).any()}\")\n", + "\n", + "# rerank_result = choose_top_candidates(embedding_results, phash_results, imgmatch_results,\n", + "# top_k=top_k, method_weights=method_weights, verbose=True)\n", + "# per_sprite_rerank_debug.append(rerank_result)\n", + "\n", + "# final = None\n", + "# if len(rerank_result[\"consensus_topk\"]) > 0:\n", + "# consensus = rerank_result[\"consensus_topk\"]\n", + "# best = max(consensus, key=lambda p: rerank_result[\"weighted_scores_full\"].get(p, 0.0))\n", + "# final = best\n", + "# else:\n", + "# final = rerank_result[\"weighted_topk\"][0][0] if rerank_result[\"weighted_topk\"] else None\n", + "\n", + "# if final is not None and final in paths_list:\n", + "# idx = paths_list.index(final)\n", + "# score = rerank_result[\"weighted_scores_full\"].get(final, 0.0)\n", + "# per_sprite_final_indices.append([idx])\n", + "# per_sprite_final_scores.append([score])\n", + "# print(f\"Sprite '{sprite_id}' FINAL selected: {final} (index {idx}) score={score:.4f}\")\n", + "# else:\n", + "# per_sprite_final_indices.append([])\n", + "# per_sprite_final_scores.append([])\n", + "\n", + "# return per_sprite_final_indices, per_sprite_final_scores, paths_list#, per_sprite_rerank_debug\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "17596af4", + "metadata": {}, + "outputs": [], + "source": [ + "# ''' It appends all the list and paths from json files and pick the best match's path'''\n", + "\n", + "# def similarity_matching(sprites_data: dict, project_folder: str, top_k: int = 1, min_similarity: float = None) -> str:\n", + "# print(\"🔍 Running similarity matching…\")\n", + "# os.makedirs(project_folder, exist_ok=True)\n", + "\n", + "# backdrop_base_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\"\n", + "# sprite_base_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\"\n", + "# code_blocks_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\"\n", + "# out_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\"\n", + "\n", + "# project_json_path = os.path.join(project_folder, \"project.json\")\n", + "\n", + "# # -------------------------\n", + "# # Build sprite images list (BytesIO) from sprites_data\n", + "# # -------------------------\n", + "# sprite_ids, sprite_base64 = [], []\n", + "# for sid, sprite in sprites_data.items():\n", + "# sprite_ids.append(sid)\n", + "# sprite_base64.append(sprite[\"base64\"])\n", + "\n", + "# sprite_images_bytes = []\n", + "# for b64 in sprite_base64:\n", + "# img = Image.open(BytesIO(base64.b64decode(b64.split(\",\")[-1]))).convert(\"RGB\")\n", + "# buffer = BytesIO()\n", + "# img.save(buffer, format=\"PNG\")\n", + "# buffer.seek(0)\n", + "# sprite_images_bytes.append(buffer)\n", + "\n", + "# # -----------------------------------------\n", + "# # Hybrid Similarity Matching System\n", + "# # -----------------------------------------\n", + "# def hybrid_similarity_matching(sprite_images_bytes, sprite_ids, \n", + "# 
min_similarity=None, top_k=5, method_weights=(0.5, 0.3, 0.2)):\n", + "# \"\"\"\n", + "# Hybrid similarity matching using DINOv2 embeddings, perceptual hashing, and image signatures\n", + " \n", + "# Args:\n", + "# sprite_images_bytes: List of image bytes\n", + "# sprite_ids: List of sprite identifiers\n", + "# blocks_dir: Directory containing reference blocks\n", + "# min_similarity: Minimum similarity threshold\n", + "# top_k: Number of top matches to return\n", + "# method_weights: Weights for (embedding, phash, image_signature) methods\n", + " \n", + "# Returns:\n", + "# per_sprite_matched_indices, per_sprite_scores, paths_list\n", + "# \"\"\"\n", + "# import imagehash as phash\n", + "# from image_match.goldberg import ImageSignature\n", + "# import math\n", + "# from collections import defaultdict\n", + " \n", + "# # Load reference data\n", + "# embeddings_path = os.path.join(out_path, \"hybrid_embeddings.json\")\n", + "# hash_path = os.path.join(out_path, \"phash_data.json\") \n", + "# signature_path = os.path.join(out_path, \"signature_data.json\")\n", + " \n", + "# # Load embeddings\n", + "# with open(embeddings_path, \"r\", encoding=\"utf-8\") as f:\n", + "# embedding_json = json.load(f)\n", + " \n", + "# # Load phash data (if exists)\n", + "# hash_dict = {}\n", + "# if os.path.exists(hash_path):\n", + "# with open(hash_path, \"r\", encoding=\"utf-8\") as f:\n", + "# hash_data = json.load(f)\n", + "# for path, hash_str in hash_data.items():\n", + "# try:\n", + "# hash_dict[path] = phash.hex_to_hash(hash_str)\n", + "# except:\n", + "# pass\n", + " \n", + "# # Load signature data (if exists)\n", + "# signature_dict = {}\n", + "# gis = ImageSignature()\n", + "# if os.path.exists(signature_path):\n", + "# with open(signature_path, \"r\", encoding=\"utf-8\") as f:\n", + "# sig_data = json.load(f)\n", + "# for path, sig_list in sig_data.items():\n", + "# try:\n", + "# signature_dict[path] = np.array(sig_list)\n", + "# except:\n", + "# pass\n", + " \n", + "# # Parse embeddings\n", + "# paths_list = []\n", + "# embeddings_list = []\n", + " \n", + "# if isinstance(embedding_json, dict):\n", + "# for p, emb in embedding_json.items():\n", + "# if isinstance(emb, dict):\n", + "# maybe_emb = emb.get(\"embedding\") or emb.get(\"embeddings\") or emb.get(\"emb\")\n", + "# if maybe_emb is None:\n", + "# continue\n", + "# arr = np.asarray(maybe_emb, dtype=np.float32)\n", + "# elif isinstance(emb, list):\n", + "# arr = np.asarray(emb, dtype=np.float32)\n", + "# else:\n", + "# continue\n", + "# paths_list.append(os.path.normpath(str(p)))\n", + "# embeddings_list.append(arr)\n", + "# elif isinstance(embedding_json, list):\n", + "# for item in embedding_json:\n", + "# if not isinstance(item, dict):\n", + "# continue\n", + "# p = item.get(\"path\") or item.get(\"image_path\") or item.get(\"file\") or item.get(\"filename\") or item.get(\"img_path\")\n", + "# emb = item.get(\"embeddings\") or item.get(\"embedding\") or item.get(\"features\") or item.get(\"vector\") or item.get(\"emb\")\n", + "# if p is None or emb is None:\n", + "# continue\n", + "# paths_list.append(os.path.normpath(str(p)))\n", + "# embeddings_list.append(np.asarray(emb, dtype=np.float32))\n", + " \n", + "# if len(paths_list) == 0:\n", + "# raise RuntimeError(\"No reference images/embeddings found\")\n", + " \n", + "# ref_matrix = np.vstack(embeddings_list).astype(np.float32)\n", + " \n", + "# # Process input sprites\n", + "# # init_dinov2()\n", + "# per_sprite_matched_indices = []\n", + "# per_sprite_scores = []\n", + " \n", + "# for i, 
(sprite_bytes, sprite_id) in enumerate(zip(sprite_images_bytes, sprite_ids)):\n", + "# print(f\"Processing sprite {i+1}/{len(sprite_ids)}: {sprite_id}\")\n", + " \n", + "# # Convert bytes to PIL for processing\n", + "# sprite_pil = Image.open(sprite_bytes)\n", + "# if sprite_pil is None:\n", + "# per_sprite_matched_indices.append([])\n", + "# per_sprite_scores.append([])\n", + "# continue\n", + " \n", + "# # Enhance image\n", + "# enhanced_sprite = process_image_cv2_from_pil(sprite_pil, scale=2)\n", + "# if enhanced_sprite is None:\n", + "# enhanced_sprite = sprite_pil\n", + " \n", + "# # 1. Compute DINOv2 embedding\n", + "# sprite_emb = get_dinov2_embedding_from_pil(preprocess_for_model(enhanced_sprite))\n", + "# if sprite_emb is None:\n", + "# sprite_emb = np.zeros(ref_matrix.shape[1])\n", + " \n", + "# # 2. Compute perceptual hash\n", + "# sprite_hash_arr = preprocess_for_hash(enhanced_sprite)\n", + "# sprite_phash = None\n", + "# if sprite_hash_arr is not None:\n", + "# try:\n", + "# sprite_phash = phash.encode_image(image_array=sprite_hash_arr)\n", + "# except:\n", + "# pass\n", + " \n", + "# # 3. Compute image signature\n", + "# sprite_sig = None\n", + "# try:\n", + "# temp_path = f\"temp_sprite_{i}.png\"\n", + "# enhanced_sprite.save(temp_path, format=\"PNG\")\n", + "# sprite_sig = gis.generate_signature(temp_path)\n", + "# os.remove(temp_path)\n", + "# except:\n", + "# pass\n", + " \n", + "# # Calculate similarities for all reference images\n", + "# embedding_results = []\n", + "# phash_results = []\n", + "# signature_results = []\n", + " \n", + "# for j, ref_path in enumerate(paths_list):\n", + "# # Embedding similarity\n", + "# try:\n", + "# ref_emb = ref_matrix[j]\n", + "# emb_sim = float(np.dot(sprite_emb, ref_emb))\n", + "# emb_sim = max(0.0, emb_sim) # Clamp negative values\n", + "# except:\n", + "# emb_sim = 0.0\n", + "# embedding_results.append((ref_path, emb_sim))\n", + " \n", + "# # Phash similarity\n", + "# ph_sim = 0.0\n", + "# if sprite_phash is not None and ref_path in hash_dict:\n", + "# try:\n", + "# ref_hash = hash_dict[ref_path]\n", + "# hd = phash.hamming_distance(sprite_phash, ref_hash)\n", + "# ph_sim = max(0.0, 1.0 - (hd / 64.0)) # Normalize to [0,1]\n", + "# except:\n", + "# pass\n", + "# phash_results.append((ref_path, ph_sim))\n", + " \n", + "# # Signature similarity\n", + "# sig_sim = 0.0\n", + "# if sprite_sig is not None and ref_path in signature_dict:\n", + "# try:\n", + "# ref_sig = signature_dict[ref_path]\n", + "# dist = gis.normalized_distance(ref_sig, sprite_sig)\n", + "# sig_sim = max(0.0, 1.0 - dist)\n", + "# except:\n", + "# pass\n", + "# signature_results.append((ref_path, sig_sim))\n", + " \n", + "# # Combine similarities using weighted approach\n", + "# def normalize_scores(scores):\n", + "# \"\"\"Normalize scores to [0,1] range\"\"\"\n", + "# if not scores:\n", + "# return {}\n", + "# vals = [s for _, s in scores if not math.isnan(s)]\n", + "# if not vals:\n", + "# return {p: 0.0 for p, _ in scores}\n", + "# vmin, vmax = min(vals), max(vals)\n", + "# if vmax == vmin:\n", + "# return {p: 1.0 if s == vmax else 0.0 for p, s in scores}\n", + "# return {p: (s - vmin) / (vmax - vmin) for p, s in scores}\n", + " \n", + "# # Normalize each method's scores\n", + "# emb_norm = normalize_scores(embedding_results)\n", + "# ph_norm = normalize_scores(phash_results)\n", + "# sig_norm = normalize_scores(signature_results)\n", + " \n", + "# # Calculate weighted combined scores\n", + "# w_emb, w_ph, w_sig = method_weights\n", + "# combined_scores = []\n", + 
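"#         # Worked example for the weighted combination below, with method_weights\n",
+    "#         # (0.5, 0.3, 0.2) and illustrative scores emb_norm=0.8, ph_norm=0.5, sig_norm=0.1:\n",
+    "#         # combined = 0.5*0.8 + 0.3*0.5 + 0.2*0.1 = 0.57.\n",
+    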
" \n", + "# for ref_path in paths_list:\n", + "# combined_score = (w_emb * emb_norm.get(ref_path, 0.0) + \n", + "# w_ph * ph_norm.get(ref_path, 0.0) + \n", + "# w_sig * sig_norm.get(ref_path, 0.0))\n", + "# combined_scores.append((ref_path, combined_score))\n", + " \n", + "# # Sort by combined score and apply thresholds\n", + "# combined_scores.sort(key=lambda x: x[1], reverse=True)\n", + " \n", + "# # Filter by minimum similarity if specified\n", + "# if min_similarity is not None:\n", + "# combined_scores = [(p, s) for p, s in combined_scores if s >= float(min_similarity)]\n", + " \n", + "# # Get top-k matches\n", + "# top_matches = combined_scores[:int(top_k)]\n", + " \n", + "# # Convert to indices and scores\n", + "# matched_indices = []\n", + "# matched_scores = []\n", + " \n", + "# for ref_path, score in top_matches:\n", + "# try:\n", + "# idx = paths_list.index(ref_path)\n", + "# matched_indices.append(idx)\n", + "# matched_scores.append(score)\n", + "# except ValueError:\n", + "# continue\n", + " \n", + "# per_sprite_matched_indices.append(matched_indices)\n", + "# per_sprite_scores.append(matched_scores)\n", + " \n", + "# print(f\"Sprite '{sprite_id}' matched {len(matched_indices)} references with scores: {matched_scores}\")\n", + "\n", + "# return per_sprite_matched_indices, per_sprite_scores, paths_list\n", + " \n", + "# def choose_top_candidates_advanced(embedding_results, phash_results, imgmatch_results, top_k=10,\n", + "# method_weights=(0.5, 0.3, 0.2), verbose=True):\n", + "# \"\"\"\n", + "# Advanced candidate selection using multiple ranking methods\n", + " \n", + "# Args:\n", + "# embedding_results: list of (path, emb_sim)\n", + "# phash_results: list of (path, hamming, ph_sim) \n", + "# imgmatch_results: list of (path, dist, im_sim)\n", + "# top_k: number of top candidates to return\n", + "# method_weights: weights for (emb, phash, imgmatch)\n", + "# verbose: whether to print detailed results\n", + " \n", + "# Returns:\n", + "# dict with top candidates from different methods and final selection\n", + "# \"\"\"\n", + "# import math\n", + "# from collections import defaultdict\n", + " \n", + "# # Build dicts for quick lookup\n", + "# emb_map = {p: float(s) for p, s in embedding_results}\n", + "# ph_map = {p: float(sim) for p, _, sim in phash_results}\n", + "# im_map = {p: float(sim) for p, _, sim in imgmatch_results}\n", + "\n", + "# # Universe of candidates (union)\n", + "# all_paths = sorted(set(list(emb_map.keys()) + list(ph_map.keys()) + list(im_map.keys())))\n", + "\n", + "# # Normalize each metric across candidates to [0,1]\n", + "# def normalize_map(m):\n", + "# vals = [m.get(p, None) for p in all_paths]\n", + "# present = [v for v in vals if v is not None and not math.isnan(v)]\n", + "# if not present:\n", + "# return {p: 0.0 for p in all_paths}\n", + "# vmin, vmax = min(present), max(present)\n", + "# if vmax == vmin:\n", + "# return {p: (1.0 if (m.get(p, None) is not None) else 0.0) for p in all_paths}\n", + "# norm = {}\n", + "# for p in all_paths:\n", + "# v = m.get(p, None)\n", + "# if v is None or math.isnan(v):\n", + "# norm[p] = 0.0\n", + "# else:\n", + "# norm[p] = max(0.0, min(1.0, (v - vmin) / (vmax - vmin)))\n", + "# return norm\n", + "\n", + "# # For embeddings, clamp negatives to 0 first\n", + "# emb_map_clamped = {p: max(0.0, v) for p, v in emb_map.items()}\n", + "\n", + "# emb_norm = normalize_map(emb_map_clamped)\n", + "# ph_norm = normalize_map(ph_map)\n", + "# im_norm = normalize_map(im_map)\n", + "\n", + "# # Method A: Normalized weighted 
average\n", + "# w_emb, w_ph, w_im = method_weights\n", + "# weighted_scores = {}\n", + "# for p in all_paths:\n", + "# weighted_scores[p] = (w_emb * emb_norm.get(p, 0.0)\n", + "# + w_ph * ph_norm.get(p, 0.0)\n", + "# + w_im * im_norm.get(p, 0.0))\n", + "\n", + "# top_weighted = sorted(weighted_scores.items(), key=lambda x: x[1], reverse=True)[:top_k]\n", + "\n", + "# # Method B: Rank-sum (Borda)\n", + "# def ranks_from_map(m_norm):\n", + "# items = sorted(m_norm.items(), key=lambda x: x[1], reverse=True)\n", + "# ranks = {}\n", + "# for i, (p, _) in enumerate(items):\n", + "# ranks[p] = i + 1 # 1-based\n", + "# worst = len(items) + 1\n", + "# for p in all_paths:\n", + "# if p not in ranks:\n", + "# ranks[p] = worst\n", + "# return ranks\n", + "\n", + "# rank_emb = ranks_from_map(emb_norm)\n", + "# rank_ph = ranks_from_map(ph_norm)\n", + "# rank_im = ranks_from_map(im_norm)\n", + "\n", + "# rank_sum = {}\n", + "# for p in all_paths:\n", + "# rank_sum[p] = rank_emb.get(p, 9999) + rank_ph.get(p, 9999) + rank_im.get(p, 9999)\n", + "# top_rank_sum = sorted(rank_sum.items(), key=lambda x: x[1])[:top_k] # smaller is better\n", + "\n", + "# # Method C: Harmonic mean\n", + "# harm_scores = {}\n", + "# for p in all_paths:\n", + "# a = emb_norm.get(p, 0.0)\n", + "# b = ph_norm.get(p, 0.0)\n", + "# c = im_norm.get(p, 0.0)\n", + "# if a + b + c == 0 or a == 0 or b == 0 or c == 0:\n", + "# harm = 0.0\n", + "# else:\n", + "# harm = 3.0 / ((1.0/a) + (1.0/b) + (1.0/c))\n", + "# harm_scores[p] = harm\n", + "# top_harm = sorted(harm_scores.items(), key=lambda x: x[1], reverse=True)[:top_k]\n", + "\n", + "# # Consensus set: items in top-K of each metric\n", + "# def topk_set_by_map(m_norm, k=top_k):\n", + "# return set([p for p,_ in sorted(m_norm.items(), key=lambda x: x[1], reverse=True)[:k]])\n", + "# cons_set = topk_set_by_map(emb_norm, top_k) & topk_set_by_map(ph_norm, top_k) & topk_set_by_map(im_norm, top_k)\n", + "\n", + "# result = {\n", + "# \"emb_norm\": emb_norm,\n", + "# \"ph_norm\": ph_norm,\n", + "# \"im_norm\": im_norm,\n", + "# \"weighted_topk\": top_weighted,\n", + "# \"rank_sum_topk\": top_rank_sum,\n", + "# \"harmonic_topk\": top_harm,\n", + "# \"consensus_topk\": list(cons_set),\n", + "# \"weighted_scores_full\": weighted_scores,\n", + "# \"rank_sum_full\": rank_sum,\n", + "# \"harmonic_full\": harm_scores\n", + "# }\n", + "\n", + "# if verbose:\n", + "# print(f\"\\nTop by Weighted Average (weights emb,ph,img = {w_emb:.2f},{w_ph:.2f},{w_im:.2f}):\")\n", + "# for i,(p,s) in enumerate(result[\"weighted_topk\"], start=1):\n", + "# print(f\" {i}. {p} score={s:.4f} emb={emb_norm.get(p,0):.3f} ph={ph_norm.get(p,0):.3f} im={im_norm.get(p,0):.3f}\")\n", + "\n", + "# print(\"\\nTop by Rank-sum (lower is better):\")\n", + "# for i,(p,s) in enumerate(result[\"rank_sum_topk\"], start=1):\n", + "# print(f\" {i}. {p} rank_sum={s} emb_rank={rank_emb.get(p)} ph_rank={rank_ph.get(p)} img_rank={rank_im.get(p)}\")\n", + "\n", + "# print(\"\\nTop by Harmonic mean:\")\n", + "# for i,(p,s) in enumerate(result[\"harmonic_topk\"], start=1):\n", + "# print(f\" {i}. 
{p} harm={s:.4f} emb={emb_norm.get(p,0):.3f} ph={ph_norm.get(p,0):.3f} im={im_norm.get(p,0):.3f}\")\n", + "\n", + "# print(f\"\\nConsensus (in top-{top_k} of ALL metrics): {result['consensus_topk']}\")\n", + "\n", + "# # Final selection logic\n", + "# final = None\n", + "# if len(result[\"consensus_topk\"]) > 0:\n", + "# # Choose best-weighted among consensus\n", + "# consensus = result[\"consensus_topk\"]\n", + "# best = max(consensus, key=lambda p: result[\"weighted_scores_full\"].get(p, 0.0))\n", + "# final = best\n", + "# else:\n", + "# final = result[\"weighted_topk\"][0][0] if result[\"weighted_topk\"] else None\n", + "\n", + "# result[\"final_selection\"] = final\n", + "# return result\n", + " \n", + "# # Use hybrid matching system\n", + "# BLOCKS_DIR = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\"\n", + "# per_sprite_matched_indices, per_sprite_scores, paths_list = hybrid_similarity_matching(\n", + "# sprite_images_bytes, sprite_ids, min_similarity, top_k, method_weights=(0.5, 0.3, 0.2)\n", + "# )\n", + "\n", + "# # =========================================\n", + "# # Copy matched sprite assets + collect data\n", + "# # =========================================\n", + "# project_data = []\n", + "# backdrop_data = []\n", + "# copied_sprite_folders = set()\n", + "# copied_backdrop_folders = set()\n", + "\n", + "# # Flatten unique matched indices to process copying once per folder\n", + "# matched_indices = sorted({idx for lst in per_sprite_matched_indices for idx in lst})\n", + "# print(\"matched_indices------------------>\",matched_indices)\n", + "# # from pathlib import Path\n", + "\n", + "# # # normalize & resolve once at top of function\n", + "# # sprite_base_p = Path(sprite_base_path).resolve()\n", + "# # backdrop_base_p = Path(backdrop_base_path).resolve()\n", + "# # project_folder_p = Path(project_folder).resolve()\n", + "# # project_folder_p.mkdir(parents=True, exist_ok=True) # ensure dest exists\n", + "\n", + "# # for matched_idx in matched_indices:\n", + "# # matched_image_path = paths_list[matched_idx]\n", + "# # matched_folder = Path(matched_image_path).parent\n", + "# # matched_filename = Path(matched_image_path).name\n", + " \n", + "# # print(f\" matched_image_path = {matched_image_path}\")\n", + "# # print(f\" matched_folder = {matched_folder}\")\n", + "# # print(f\" matched_filename = {matched_filename}\")\n", + "\n", + "# # # resolve matched_folder (safe even if it's already absolute)\n", + "# # try:\n", + "# # matched_folder_p = matched_folder.resolve()\n", + "# # except Exception:\n", + "# # # fallback to absolute path join if resolve fails\n", + "# # matched_folder_p = (Path.cwd() / matched_folder).resolve()\n", + "\n", + "# # print(f\"Processing matched image: {matched_image_path}\")\n", + "# # print(f\" matched_folder_p = {matched_folder_p}\")\n", + "# # print(f\" sprite_base_p = {sprite_base_p}\")\n", + "# # print(f\" backdrop_base_p = {backdrop_base_p}\")\n", + "\n", + "# # # Check if matched_folder is under sprite_base_p\n", + "# # is_sprite = False\n", + "# # try:\n", + "# # matched_folder_p.relative_to(sprite_base_p)\n", + "# # is_sprite = True\n", + "# # print(\"is_sprite----->\",is_sprite)\n", + "# # except Exception:\n", + "# # is_sprite = False\n", + "# # print(\"is_sprite----->\",is_sprite)\n", + " \n", + "# # if is_sprite and str(matched_folder) not in copied_sprite_folders:\n", + "# # print(f\"Processing SPRITE folder: {matched_folder}\")\n", + "# # copied_sprite_folders.add(str(matched_folder))\n", + "# # sprite_json_path = 
matched_folder / \"sprite.json\"\n", + "# # # ... (rest of your logic, but use Path objects)\n", + "# # sprite_files = list(matched_folder.iterdir())\n", + "# # print(f\" Files in sprite folder: {[p.name for p in sprite_files]}\")\n", + "# # for p in sprite_files:\n", + "# # fname = p.name\n", + "# # if fname in (matched_filename, \"sprite.json\"):\n", + "# # print(f\" Skipping {fname} (matched image or sprite.json)\")\n", + "# # continue\n", + "# # if p.is_file():\n", + "# # dst = project_folder_p / fname\n", + "# # try:\n", + "# # shutil.copy2(str(p), str(dst))\n", + "# # print(f\" ✓ Copied sprite asset: {p} -> {dst}\")\n", + "# # except Exception as e:\n", + "# # print(f\" ✗ Failed to copy sprite asset {p}: {e}\")\n", + "# # else:\n", + "# # print(f\" Skipping {fname} (not a file)\")\n", + "\n", + "# # # Check if matched_folder is under backdrop_base_p\n", + "# # is_backdrop = False\n", + "# # try:\n", + "# # matched_folder_p.relative_to(backdrop_base_p)\n", + "# # is_backdrop = True\n", + "# # print(\"is_backdrop----->\",is_backdrop)\n", + "# # except Exception:\n", + "# # is_backdrop = False\n", + "# # print(\"is_backdrop----->\",is_backdrop)\n", + "\n", + "# # if is_backdrop and str(matched_folder) not in copied_backdrop_folders:\n", + "# # print(f\"Processing BACKDROP folder: {matched_folder}\")\n", + "# # copied_backdrop_folders.add(str(matched_folder))\n", + "# # # copy matched backdrop image\n", + "# # backdrop_dst = project_folder_p / matched_filename\n", + "# # try:\n", + "# # shutil.copy2(str(matched_folder / matched_filename), str(backdrop_dst))\n", + "# # print(f\" ✓ Copied matched backdrop image: {matched_folder / matched_filename} -> {backdrop_dst}\")\n", + "# # except Exception as e:\n", + "# # print(f\" ✗ Failed to copy matched backdrop image {matched_folder / matched_filename}: {e}\")\n", + "\n", + "# # # copy other files similarly using Path.iterdir()\n", + "# # # read project.json at matched_folder / \"project.json\"\n", + "# # pj = matched_folder / \"project.json\"\n", + "# # if pj.exists():\n", + "# # try:\n", + "# # with pj.open(\"r\", encoding=\"utf-8\") as f:\n", + "# # bd_json = json.load(f)\n", + "# # stage_count = 0\n", + "# # for tgt in bd_json.get(\"targets\", []):\n", + "# # if tgt.get(\"isStage\"):\n", + "# # backdrop_data.append(tgt)\n", + "# # stage_count += 1\n", + "# # print(f\" ✓ Successfully read project.json from {matched_folder}, found {stage_count} stage(s)\")\n", + "# # except Exception as e:\n", + "# # print(f\" ✗ Failed to read project.json in {matched_folder}: {e}\")\n", + "# # else:\n", + "# # print(f\" ⚠ No project.json in {matched_folder}\")\n", + "\n", + "# # print(\"---\")\n", + "\n", + "# for matched_idx in matched_indices:\n", + "# matched_image_path = paths_list[matched_idx]\n", + "# matched_folder = os.path.dirname(matched_image_path)\n", + "# matched_filename = os.path.basename(matched_image_path)\n", + " \n", + "# print(f\"Processing matched image: {matched_image_path}\")\n", + "# print(f\" - Folder: {matched_folder}\")\n", + "# print(f\" - Sprite path: {sprite_base_path}\")\n", + "# print(f\" - Backdrop path: {backdrop_base_path}\")\n", + "# print(f\" - Filename: {matched_filename}\")\n", + " \n", + "# # If it's a sprite (under SPRITE_DIR) -> copy sprite assets and read sprite.json\n", + "# if matched_folder.startswith(sprite_base_path) and matched_folder not in copied_sprite_folders:\n", + "# print(f\"Processing SPRITE folder: {matched_folder}\")\n", + "# copied_sprite_folders.add(matched_folder)\n", + "# sprite_json_path = 
os.path.join(matched_folder, \"sprite.json\")\n", + "# print(\"sprite_json_path----------------------->\",sprite_json_path)\n", + "# print(\"copied sprite folder----------------------->\",copied_sprite_folders)\n", + "# if os.path.exists(sprite_json_path):\n", + "# try:\n", + "# with open(sprite_json_path, \"r\", encoding=\"utf-8\") as f:\n", + "# sprite_info = json.load(f)\n", + "# project_data.append(sprite_info)\n", + "# print(f\" ✓ Successfully read sprite.json from {matched_folder}\")\n", + "# except Exception as e:\n", + "# print(f\" ✗ Failed to read sprite.json in {matched_folder}: {e}\")\n", + "# else:\n", + "# print(f\" ⚠ No sprite.json in {matched_folder}\")\n", + " \n", + "# # copy non-matching files from the sprite folder (except the matched image and sprite.json)\n", + "# sprite_files = os.listdir(matched_folder)\n", + "# print(f\" Files in sprite folder: {sprite_files}\")\n", + "# for fname in sprite_files:\n", + "# if fname in (matched_filename, \"sprite.json\"):\n", + "# print(f\" Skipping {fname} (matched image or sprite.json)\")\n", + "# continue\n", + "# src = os.path.join(matched_folder, fname)\n", + "# dst = os.path.join(project_folder, fname)\n", + "# if os.path.isfile(src):\n", + "# try:\n", + "# shutil.copy2(src, dst)\n", + "# print(f\" ✓ Copied sprite asset: {src} -> {dst}\")\n", + "# except Exception as e:\n", + "# print(f\" ✗ Failed to copy sprite asset {src}: {e}\")\n", + "# else:\n", + "# print(f\" Skipping {fname} (not a file)\")\n", + "\n", + "# # If it's a backdrop (under BACKDROP_DIR) -> copy backdrop assets and read project.json for stage\n", + "# if matched_folder.startswith(backdrop_base_path) and matched_folder not in copied_backdrop_folders:\n", + "# print(f\"Processing BACKDROP folder: {matched_folder}\")\n", + "# copied_backdrop_folders.add(matched_folder)\n", + "# print(\"backdrop_base_path----------------------->\",backdrop_base_path)\n", + "# print(\"copied backdrop folder----------------------->\",copied_backdrop_folders)\n", + "# # copy matched backdrop image\n", + "# backdrop_dst = os.path.join(project_folder, matched_filename)\n", + "# try:\n", + "# shutil.copy2(matched_image_path, backdrop_dst)\n", + "# print(f\" ✓ Copied matched backdrop image: {matched_image_path} -> {backdrop_dst}\")\n", + "# except Exception as e:\n", + "# print(f\" ✗ Failed to copy matched backdrop image {matched_image_path}: {e}\")\n", + "\n", + "# # copy other files from folder (skip project.json and matched image)\n", + "# backdrop_files = os.listdir(matched_folder)\n", + "# print(f\" Files in backdrop folder: {backdrop_files}\")\n", + "# for fname in backdrop_files:\n", + "# if fname in (matched_filename, \"project.json\"):\n", + "# print(f\" Skipping {fname} (matched image or project.json)\")\n", + "# continue\n", + "# src = os.path.join(matched_folder, fname)\n", + "# dst = os.path.join(project_folder, fname)\n", + "# if os.path.isfile(src):\n", + "# try:\n", + "# shutil.copy2(src, dst)\n", + "# print(f\" ✓ Copied backdrop asset: {src} -> {dst}\")\n", + "# except Exception as e:\n", + "# print(f\" ✗ Failed to copy backdrop asset {src}: {e}\")\n", + "# else:\n", + "# print(f\" Skipping {fname} (not a file)\")\n", + "\n", + "# # read project.json to extract Stage/targets\n", + "# pj = os.path.join(matched_folder, \"project.json\")\n", + "# if os.path.exists(pj):\n", + "# try:\n", + "# with open(pj, \"r\", encoding=\"utf-8\") as f:\n", + "# bd_json = json.load(f)\n", + "# stage_count = 0\n", + "# for tgt in bd_json.get(\"targets\", []):\n", + "# if 
tgt.get(\"isStage\"):\n", + "# backdrop_data.append(tgt)\n", + "# stage_count += 1\n", + "# print(f\" ✓ Successfully read project.json from {matched_folder}, found {stage_count} stage(s)\")\n", + "# except Exception as e:\n", + "# print(f\" ✗ Failed to read project.json in {matched_folder}: {e}\")\n", + "# else:\n", + "# print(f\" ⚠ No project.json in {matched_folder}\")\n", + " \n", + "# print(\"---\")\n", + "\n", + "# # --- Merge into final Scratch project.json (identical logic to before)\n", + "# final_project = {\n", + "# \"targets\": [], \"monitors\": [], \"extensions\": [],\n", + "# \"meta\": {\n", + "# \"semver\": \"3.0.0\",\n", + "# \"vm\": \"11.3.0\",\n", + "# \"agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36\"\n", + "# }\n", + "# }\n", + "\n", + "# # Add sprite targets (non-stage)\n", + "# for spr in project_data:\n", + "# if not spr.get(\"isStage\", False):\n", + "# final_project[\"targets\"].append(spr)\n", + "\n", + "# # Add Stage/backdrop\n", + "# if backdrop_data:\n", + "# all_costumes, sounds = [], []\n", + "# seen_costumes = set()\n", + "# for i, bd in enumerate(backdrop_data):\n", + "# for costume in bd.get(\"costumes\", []):\n", + "# key = (costume.get(\"name\"), costume.get(\"assetId\"))\n", + "# if key not in seen_costumes:\n", + "# seen_costumes.add(key)\n", + "# all_costumes.append(costume)\n", + "# if i == 0:\n", + "# sounds = bd.get(\"sounds\", [])\n", + "# stage_obj = {\n", + "# \"isStage\": True,\n", + "# \"name\": \"Stage\",\n", + "# \"objName\": \"Stage\",\n", + "# \"variables\": {},\n", + "# \"lists\": {},\n", + "# \"broadcasts\": {},\n", + "# \"blocks\": {},\n", + "# \"comments\": {},\n", + "# \"currentCostume\": 1 if len(all_costumes) > 1 else 0,\n", + "# \"costumes\": all_costumes,\n", + "# \"sounds\": sounds,\n", + "# \"volume\": 100,\n", + "# \"layerOrder\": 0,\n", + "# \"tempo\": 60,\n", + "# \"videoTransparency\": 50,\n", + "# \"videoState\": \"on\",\n", + "# \"textToSpeechLanguage\": None\n", + "# }\n", + "# final_project[\"targets\"].insert(0, stage_obj)\n", + "# else:\n", + "# # fallback to default backdrop same as your previous logic\n", + "# print(\"⚠️ No backdrop matched. 
Using default static backdrop.\")\n", + "# default_backdrop_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\cd21514d0531fdffb22204e0ec5ed84a.svg\"\n", + "# default_backdrop_name = \"cd21514d0531fdffb22204e0ec5ed84a.svg\"\n", + "# default_backdrop_sound = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\83a9787d4cb6f3b7632b4ddfebf74367.wav\"\n", + "# default_backdrop_sound_name = \"83a9787d4cb6f3b7632b4ddfebf74367.wav\"\n", + "# try:\n", + "# shutil.copy2(default_backdrop_path, os.path.join(project_folder, default_backdrop_name))\n", + "# shutil.copy2(default_backdrop_sound, os.path.join(project_folder, default_backdrop_sound_name))\n", + "# except Exception as e:\n", + "# print(\"❌ Failed to copy default backdrop: %s\", e)\n", + "\n", + "# stage_obj = {\n", + "# \"isStage\": True,\n", + "# \"name\": \"Stage\",\n", + "# \"objName\": \"Stage\",\n", + "# \"variables\": {},\n", + "# \"lists\": {},\n", + "# \"broadcasts\": {},\n", + "# \"blocks\": {},\n", + "# \"comments\": {},\n", + "# \"currentCostume\": 0,\n", + "# \"costumes\": [\n", + "# {\n", + "# \"assetId\": default_backdrop_name.split(\".\")[0],\n", + "# \"name\": \"defaultBackdrop\",\n", + "# \"md5ext\": default_backdrop_name,\n", + "# \"dataFormat\": \"svg\",\n", + "# \"rotationCenterX\": 240,\n", + "# \"rotationCenterY\": 180\n", + "# }\n", + "# ],\n", + "# \"sounds\": [\n", + "# {\n", + "# \"name\": \"pop\",\n", + "# \"assetId\": \"83a9787d4cb6f3b7632b4ddfebf74367\",\n", + "# \"dataFormat\": \"wav\",\n", + "# \"format\": \"\",\n", + "# \"rate\": 48000,\n", + "# \"sampleCount\": 1123,\n", + "# \"md5ext\": \"83a9787d4cb6f3b7632b4ddfebf74367.wav\"\n", + "# }\n", + "# ],\n", + "# \"volume\": 100,\n", + "# \"layerOrder\": 0,\n", + "# \"tempo\": 60,\n", + "# \"videoTransparency\": 50,\n", + "# \"videoState\": \"on\",\n", + "# \"textToSpeechLanguage\": None\n", + "# }\n", + "# final_project[\"targets\"].insert(0, stage_obj)\n", + "\n", + "# # --- write out project.json\n", + "# with open(project_json_path, \"w\", encoding=\"utf-8\") as f:\n", + "# json.dump(final_project, f, indent=2)\n", + "\n", + "# return project_json_path" + ] + }, + { + "cell_type": "markdown", + "id": "2befe6f8", + "metadata": {}, + "source": [ + "## updated similarity matching flow" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "bcffadcf", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import json\n", + "import numpy as np\n", + "from io import BytesIO\n", + "from PIL import Image\n", + "from pathlib import Path\n", + "import shutil\n", + "\n", + "phash = PHash()\n", + "gis = ImageSignature()\n", + "\n", + "\n", + "def similarity_matching(sprites_data: dict, project_folder: str, top_k: int = 1, min_similarity: float = None) -> str:\n", + " print(\"🔍 Running similarity matching…\")\n", + " os.makedirs(project_folder, exist_ok=True)\n", + "\n", + " backdrop_base_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\"\n", + " sprite_base_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\"\n", + " code_blocks_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\"\n", + " out_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\"\n", + " \n", + "\n", + " project_json_path = os.path.join(project_folder, \"project.json\")\n", + "\n", + " # -------------------------\n", + " # Build sprite images list (BytesIO) from sprites_data\n", + " # -------------------------\n", + " sprite_ids, 
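sprite_base64 = [], []\n",
+    "    # sprites_data is expected to map sprite_id -> {\"base64\": \"data:image/...;base64,<payload>\"};\n",
+    "    # the loop below keeps the ids and the prefix-stripped payloads aligned by index.\n",
+    "    # NB: the PHash / ImageSignature objects instantiated above are assumed to be\n",
+    "    # imagededup.methods.PHash and image_match.goldberg.ImageSignature (the commented-out\n",
+    "    # cell earlier imports the latter); make sure they are imported before this cell runs.\n",
+    "    sprite_ids, 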
sprite_base64 = [], []\n", + " for sid, sprite in sprites_data.items():\n", + " sprite_ids.append(sid)\n", + " sprite_base64.append(sprite[\"base64\"])\n", + "\n", + " sprite_images_bytes = []\n", + " sprite_b64_clean = [] # <<< new: store cleaned base64 strings\n", + " for b64 in sprite_base64:\n", + " # remove possible \"data:image/..;base64,\" prefix\n", + " raw_b64 = b64.split(\",\")[-1]\n", + " sprite_b64_clean.append(raw_b64)\n", + "\n", + " # decode into BytesIO for local processing\n", + " img = Image.open(BytesIO(base64.b64decode(raw_b64))).convert(\"RGB\")\n", + " buffer = BytesIO()\n", + " img.save(buffer, format=\"PNG\")\n", + " buffer.seek(0)\n", + " sprite_images_bytes.append(buffer)\n", + " \n", + " def hybrid_similarity_matching(sprite_images_bytes, sprite_ids, min_similarity=None, top_k=5, method_weights=(0.5,0.3,0.2)):\n", + " from PIL import Image\n", + " # Local safe defaults\n", + " embeddings_path = os.path.join(out_path, \"hybrid_embeddings.json\")\n", + " hash_path = os.path.join(out_path, \"phash_data.json\")\n", + " signature_path = os.path.join(out_path, \"signature_data.json\")\n", + "\n", + " # Load embeddings\n", + " embedding_json = {}\n", + " if os.path.exists(embeddings_path):\n", + " with open(embeddings_path, \"r\", encoding=\"utf-8\") as f:\n", + " embedding_json = json.load(f)\n", + "\n", + " # Load phash data (if exists) -> ensure hash_dict variable exists\n", + " hash_dict = {}\n", + " if os.path.exists(hash_path):\n", + " try:\n", + " with open(hash_path, \"r\", encoding=\"utf-8\") as f:\n", + " hash_data = json.load(f)\n", + " for path, hash_str in hash_data.items():\n", + " try:\n", + " hash_dict[path] = hash_str\n", + " except Exception:\n", + " pass\n", + " except Exception:\n", + " pass\n", + "\n", + " # Load signature data (if exists) -> ensure signature_dict exists\n", + " signature_dict = {}\n", + " sig_data = {}\n", + " if os.path.exists(signature_path):\n", + " try:\n", + " with open(signature_path, \"r\", encoding=\"utf-8\") as f:\n", + " sig_data = json.load(f)\n", + " for path, sig_list in sig_data.items():\n", + " try:\n", + " signature_dict[path] = np.array(sig_list)\n", + " except Exception:\n", + " pass\n", + " except Exception:\n", + " pass\n", + "\n", + " # Parse embeddings into lists\n", + " paths_list = []\n", + " embeddings_list = []\n", + " if isinstance(embedding_json, dict):\n", + " for p, emb in embedding_json.items():\n", + " if isinstance(emb, dict):\n", + " maybe_emb = emb.get(\"embedding\") or emb.get(\"embeddings\") or emb.get(\"emb\")\n", + " if maybe_emb is None:\n", + " continue\n", + " arr = np.asarray(maybe_emb, dtype=np.float32)\n", + " elif isinstance(emb, list):\n", + " arr = np.asarray(emb, dtype=np.float32)\n", + " else:\n", + " continue\n", + " paths_list.append(os.path.normpath(str(p)))\n", + " embeddings_list.append(arr)\n", + " elif isinstance(embedding_json, list):\n", + " for item in embedding_json:\n", + " if not isinstance(item, dict):\n", + " continue\n", + " p = item.get(\"path\") or item.get(\"image_path\") or item.get(\"file\") or item.get(\"filename\") or item.get(\"img_path\")\n", + " emb = item.get(\"embeddings\") or item.get(\"embedding\") or item.get(\"features\") or item.get(\"vector\") or item.get(\"emb\")\n", + " if p is None or emb is None:\n", + " continue\n", + " paths_list.append(os.path.normpath(str(p)))\n", + " embeddings_list.append(np.asarray(emb, dtype=np.float32))\n", + "\n", + " if len(paths_list) == 0:\n", + " print(\"⚠ No reference images/embeddings found (this test harness may 
be running without data)\")\n", + " # Return empty results gracefully\n", + " return [[] for _ in sprite_images_bytes], [[] for _ in sprite_images_bytes], []\n", + "\n", + " ref_matrix = np.vstack(embeddings_list).astype(np.float32)\n", + " \n", + " # Batch: Get all sprite embeddings, phash, sigs first\n", + " sprite_emb_list = []\n", + " sprite_phash_list = []\n", + " sprite_sig_list = []\n", + " per_sprite_final_indices = []\n", + " per_sprite_final_scores = []\n", + " per_sprite_rerank_debug = []\n", + " for i, sprite_bytes in enumerate(sprite_images_bytes):\n", + " sprite_pil = Image.open(sprite_bytes)\n", + " enhanced_sprite = process_image_cv2_from_pil(sprite_pil, scale=2) or sprite_pil\n", + " # sprite_emb = get_dinov2_embedding_from_pil(preprocess_for_model(enhanced_sprite)) or np.zeros(ref_matrix.shape[1])\n", + " # sprite_emb_list.append(sprite_emb)\n", + " sprite_emb = get_dinov2_embedding_from_pil(preprocess_for_model(enhanced_sprite))\n", + " sprite_emb = sprite_emb if sprite_emb is not None else np.zeros(ref_matrix.shape[1])\n", + " sprite_emb_list.append(sprite_emb)\n", + " # Perceptual hash\n", + " sprite_hash_arr = preprocess_for_hash(enhanced_sprite)\n", + " sprite_phash = None\n", + " if sprite_hash_arr is not None:\n", + " try: sprite_phash = phash.encode_image(image_array=sprite_hash_arr)\n", + " except: pass\n", + " sprite_phash_list.append(sprite_phash)\n", + " # Signature\n", + " sprite_sig = None\n", + " embedding_results, phash_results, imgmatch_results, combined_results = run_query_search_flow(\n", + " query_b64=sprite_b64_clean[i],\n", + " processed_dir=out_path,\n", + " embeddings_dict=embedding_json,\n", + " hash_dict=hash_data,\n", + " signature_obj_map=sig_data,\n", + " gis=gis,\n", + " phash=phash,\n", + " MAX_PHASH_BITS=64,\n", + " k=5\n", + " )\n", + " # Call the advanced re-ranker\n", + " rerank_result = choose_top_candidates(embedding_results, phash_results, imgmatch_results,\n", + " top_k=top_k, method_weights=method_weights, verbose=True)\n", + " per_sprite_rerank_debug.append(rerank_result)\n", + "\n", + " # Selection logic: prefer consensus, else weighted top-1\n", + " final = None\n", + " if len(rerank_result[\"consensus_topk\"]) > 0:\n", + " consensus = rerank_result[\"consensus_topk\"]\n", + " best = max(consensus, key=lambda p: rerank_result[\"weighted_scores_full\"].get(p, 0.0))\n", + " final = best\n", + " else:\n", + " final = rerank_result[\"weighted_topk\"][0][0] if rerank_result[\"weighted_topk\"] else None\n", + "\n", + " # Store index and score for downstream use\n", + " if final is not None and final in paths_list:\n", + " idx = paths_list.index(final)\n", + " score = rerank_result[\"weighted_scores_full\"].get(final, 0.0)\n", + " per_sprite_final_indices.append([idx])\n", + " per_sprite_final_scores.append([score])\n", + " print(f\"Sprite '{sprite_ids}' FINAL selected: {final} (index {idx}) score={score:.4f}\")\n", + " else:\n", + " per_sprite_final_indices.append([])\n", + " per_sprite_final_scores.append([])\n", + "\n", + " return per_sprite_final_indices, per_sprite_final_scores, paths_list#, per_sprite_rerank_debug\n", + "\n", + " # Use hybrid matching system\n", + " per_sprite_matched_indices, per_sprite_scores, paths_list = hybrid_similarity_matching(\n", + " sprite_images_bytes, sprite_ids, min_similarity, top_k, method_weights=(0.5, 0.3, 0.2)\n", + " )\n", + "\n", + " # =========================================\n", + " # Copy matched sprite assets + collect data\n", + " # =========================================\n", + " 
project_data = []\n", + " backdrop_data = []\n", + " copied_sprite_folders = set()\n", + " copied_backdrop_folders = set()\n", + "\n", + " matched_indices = sorted({idx for lst in per_sprite_matched_indices for idx in lst})\n", + " print(\"matched_indices------------------>\",matched_indices)\n", + "\n", + " sprite_base_p = Path(sprite_base_path).resolve(strict=False)\n", + " backdrop_base_p = Path(backdrop_base_path).resolve(strict=False)\n", + " project_folder_p = Path(project_folder)\n", + " project_folder_p.mkdir(parents=True, exist_ok=True)\n", + " \n", + " def display_like_windows_no_lead(p: Path) -> str:\n", + " s = p.as_posix()\n", + " if s.startswith(\"/\"):\n", + " s = s[1:]\n", + " return s.replace(\"/\", \"\\\\\")\n", + " \n", + " def is_subpath(child: Path, parent: Path) -> bool:\n", + " try:\n", + " child.relative_to(parent)\n", + " return True\n", + " except Exception:\n", + " return False\n", + "\n", + " # Copy assets and build project data (unchanged from your version)\n", + " for matched_idx in matched_indices:\n", + " if not (0 <= matched_idx < len(paths_list)):\n", + " print(f\" ⚠ matched_idx {matched_idx} out of range, skipping\")\n", + " continue\n", + " matched_image_path = paths_list[matched_idx]\n", + " matched_path_p = Path(matched_image_path).resolve(strict=False)\n", + " matched_folder_p = matched_path_p.parent\n", + " matched_filename = matched_path_p.name\n", + " matched_folder_display = display_like_windows_no_lead(matched_folder_p)\n", + " print(f\"Processing matched image: {matched_image_path}\")\n", + " print(f\" - Folder: {matched_folder_display}\")\n", + "\n", + " folder_key = matched_folder_p.as_posix()\n", + "\n", + " # SPRITE\n", + " if is_subpath(matched_folder_p, sprite_base_p) and folder_key not in copied_sprite_folders:\n", + " print(f\"Processing SPRITE folder: {matched_folder_display}\")\n", + " copied_sprite_folders.add(folder_key)\n", + " sprite_json_path = matched_folder_p / \"sprite.json\"\n", + " if sprite_json_path.exists() and sprite_json_path.is_file():\n", + " try:\n", + " with sprite_json_path.open(\"r\", encoding=\"utf-8\") as f:\n", + " sprite_info = json.load(f)\n", + " project_data.append(sprite_info)\n", + " print(f\" ✓ Successfully read sprite.json from {matched_folder_display}\")\n", + " except Exception as e:\n", + " print(f\" ✗ Failed to read sprite.json in {matched_folder_display}: {repr(e)}\")\n", + " else:\n", + " print(f\" ⚠ No sprite.json in {matched_folder_display}\")\n", + " try:\n", + " sprite_files = list(matched_folder_p.iterdir())\n", + " except Exception as e:\n", + " sprite_files = []\n", + " print(f\" ✗ Failed to list files in {matched_folder_display}: {repr(e)}\")\n", + " print(f\" Files in sprite folder: {[p.name for p in sprite_files]}\")\n", + " for p in sprite_files:\n", + " fname = p.name\n", + " if fname in (matched_filename, \"sprite.json\"):\n", + " continue\n", + " if p.is_file():\n", + " dst = project_folder_p / fname\n", + " try:\n", + " shutil.copy2(str(p), str(dst))\n", + " print(f\" ✓ Copied sprite asset: {p} -> {dst}\")\n", + " except Exception as e:\n", + " print(f\" ✗ Failed to copy sprite asset {p}: {repr(e)}\")\n", + "\n", + " # BACKDROP\n", + " if is_subpath(matched_folder_p, backdrop_base_p) and folder_key not in copied_backdrop_folders:\n", + " print(f\"Processing BACKDROP folder: {matched_folder_display}\")\n", + " copied_backdrop_folders.add(folder_key)\n", + " backdrop_src = matched_folder_p / matched_filename\n", + " backdrop_dst = project_folder_p / matched_filename\n", + " if 
backdrop_src.exists() and backdrop_src.is_file():\n", + " try:\n", + " shutil.copy2(str(backdrop_src), str(backdrop_dst))\n", + " print(f\" ✓ Copied matched backdrop image: {backdrop_src} -> {backdrop_dst}\")\n", + " except Exception as e:\n", + " print(f\" ✗ Failed to copy matched backdrop image {backdrop_src}: {repr(e)}\")\n", + " else:\n", + " print(f\" ⚠ Matched backdrop source not found: {backdrop_src}\")\n", + " try:\n", + " backdrop_files = list(matched_folder_p.iterdir())\n", + " except Exception as e:\n", + " backdrop_files = []\n", + " print(f\" ✗ Failed to list files in {matched_folder_display}: {repr(e)}\")\n", + " print(f\" Files in backdrop folder: {[p.name for p in backdrop_files]}\")\n", + " for p in backdrop_files:\n", + " fname = p.name\n", + " if fname in (matched_filename, \"project.json\"):\n", + " continue\n", + " if p.is_file():\n", + " dst = project_folder_p / fname\n", + " try:\n", + " shutil.copy2(str(p), str(dst))\n", + " print(f\" ✓ Copied backdrop asset: {p} -> {dst}\")\n", + " except Exception as e:\n", + " print(f\" ✗ Failed to copy backdrop asset {p}: {repr(e)}\")\n", + " pj = matched_folder_p / \"project.json\"\n", + " if pj.exists() and pj.is_file():\n", + " try:\n", + " with pj.open(\"r\", encoding=\"utf-8\") as f:\n", + " bd_json = json.load(f)\n", + " for tgt in bd_json.get(\"targets\", []):\n", + " if tgt.get(\"isStage\"):\n", + " backdrop_data.append(tgt)\n", + " except Exception as e:\n", + " print(f\" ✗ Failed to read project.json in {matched_folder_display}: {repr(e)}\")\n", + "\n", + " # Final project JSON creation (same as your code)\n", + " final_project = {\n", + " \"targets\": [], \"monitors\": [], \"extensions\": [],\n", + " \"meta\": {\n", + " \"semver\": \"3.0.0\",\n", + " \"vm\": \"11.3.0\",\n", + " \"agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36\"\n", + " }\n", + " }\n", + " for spr in project_data:\n", + " if not spr.get(\"isStage\", False):\n", + " final_project[\"targets\"].append(spr)\n", + " if backdrop_data:\n", + " all_costumes, sounds = [], []\n", + " seen_costumes = set()\n", + " for i, bd in enumerate(backdrop_data):\n", + " for costume in bd.get(\"costumes\", []):\n", + " key = (costume.get(\"name\"), costume.get(\"assetId\"))\n", + " if key not in seen_costumes:\n", + " seen_costumes.add(key)\n", + " all_costumes.append(costume)\n", + " if i == 0:\n", + " sounds = bd.get(\"sounds\", [])\n", + " stage_obj={\n", + " \"isStage\": True,\n", + " \"name\": \"Stage\",\n", + " \"objName\": \"Stage\",\n", + " \"variables\": {}, \n", + " \"lists\": {}, \n", + " \"broadcasts\": {},\n", + " \"blocks\": {}, \n", + " \"comments\": {},\n", + " \"currentCostume\": 1 if len(all_costumes) > 1 else 0,\n", + " \"costumes\": all_costumes,\n", + " \"sounds\": sounds,\n", + " \"volume\": 100, \n", + " \"layerOrder\": 0,\n", + " \"tempo\": 60, \n", + " \"videoTransparency\": 50,\n", + " \"videoState\": \"on\",\n", + " \"textToSpeechLanguage\": None\n", + " }\n", + " final_project[\"targets\"].insert(0, stage_obj)\n", + " else:\n", + " logger.warning(\"⚠️ No backdrop matched. 
Using default static backdrop.\")\n",
+    "        default_backdrop_path = BACKDROP_DIR / \"cd21514d0531fdffb22204e0ec5ed84a.svg\"\n",
+    "        default_backdrop_name = \"cd21514d0531fdffb22204e0ec5ed84a.svg\"\n",
+    "        default_backdrop_sound = BACKDROP_DIR / \"83a9787d4cb6f3b7632b4ddfebf74367.wav\"\n",
+    "        default_backdrop_sound_name = \"83a9787d4cb6f3b7632b4ddfebf74367.wav\"\n",
+    "        try:\n",
+    "            shutil.copy2(default_backdrop_path, os.path.join(project_folder, default_backdrop_name))\n",
+    "            logger.info(f\"✅ Default backdrop copied to project: {default_backdrop_name}\")\n",
+    "            shutil.copy2(default_backdrop_sound, os.path.join(project_folder, default_backdrop_sound_name))\n",
+    "            logger.info(f\"✅ Default backdrop sound copied to project: {default_backdrop_sound_name}\")\n",
+    "        except Exception as e:\n",
+    "            logger.error(f\"❌ Failed to copy default backdrop: {e}\")\n",
+    "        stage_obj={\n",
+    "            \"isStage\": True,\n",
+    "            \"name\": \"Stage\",\n",
+    "            \"objName\": \"Stage\",\n",
+    "            \"variables\": {},\n",
+    "            \"lists\": {},\n",
+    "            \"broadcasts\": {},\n",
+    "            \"blocks\": {},\n",
+    "            \"comments\": {},\n",
+    "            \"currentCostume\": 0,\n",
+    "            \"costumes\": [\n",
+    "                {\n",
+    "                    \"assetId\": default_backdrop_name.split(\".\")[0],\n",
+    "                    \"name\": \"defaultBackdrop\",\n",
+    "                    \"md5ext\": default_backdrop_name,\n",
+    "                    \"dataFormat\": \"svg\",\n",
+    "                    \"rotationCenterX\": 240,\n",
+    "                    \"rotationCenterY\": 180\n",
+    "                }\n",
+    "            ],\n",
+    "            \"sounds\": [\n",
+    "                {\n",
+    "                    \"name\": \"pop\",\n",
+    "                    \"assetId\": \"83a9787d4cb6f3b7632b4ddfebf74367\",\n",
+    "                    \"dataFormat\": \"wav\",\n",
+    "                    \"format\": \"\",\n",
+    "                    \"rate\": 48000,\n",
+    "                    \"sampleCount\": 1123,\n",
+    "                    \"md5ext\": \"83a9787d4cb6f3b7632b4ddfebf74367.wav\"\n",
+    "                }\n",
+    "            ],\n",
+    "            \"volume\": 100,\n",
+    "            \"layerOrder\": 0,\n",
+    "            \"tempo\": 60,\n",
+    "            \"videoTransparency\": 50,\n",
+    "            \"videoState\": \"on\",\n",
+    "            \"textToSpeechLanguage\": None\n",
+    "        }\n",
+    "        final_project[\"targets\"].insert(0, stage_obj)\n",
+    "\n",
+    "    with open(project_json_path, 'w') as f:\n",
+    "        json.dump(final_project, f, indent=2)\n",
+    "\n",
+    "    return project_json_path"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 36,
+   "id": "293944f6",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Replace this with the actual path to your PDF file\n",
+    "pdf_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\test_samp\\shared image_5.pdf\"\n",
+    " \n",
+    "# Open and read the PDF file as bytes, then wrap it in a BytesIO stream\n",
+    "with open(pdf_path, \"rb\") as pdf_file:\n",
+    "    pdf_bytes = pdf_file.read()\n",
+    "    pdf_stream = io.BytesIO(pdf_bytes) "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "6a224056",
+   "metadata": {},
+   "source": [
+    "## updated extract image module"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 22,
+   "id": "7ca0223c",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def extract_images_from_pdf(pdf_stream, output_dir):\n",
+    "    manipulated_json = {}\n",
+    "    try:\n",
+    "        pdf_id = uuid.uuid4().hex\n",
+    "        elements = partition_pdf(\n",
+    "            file=pdf_stream,\n",
+    "            strategy=\"hi_res\",\n",
+    "            extract_image_block_types=[\"Image\"],\n",
+    "            hi_res_model_name=\"yolox\",\n",
+    "            extract_image_block_to_payload=False,\n",
+    "            extract_image_block_output_dir=output_dir,\n",
+    "        )\n",
+    "        file_elements = [element.to_dict() for element in elements]\n",
+    "        sprite_count = 1\n",
+    "        for el in file_elements:\n",
+    "            img_path = el[\"metadata\"].get(\"image_path\")\n",
+    "\n",
+    "            # ✅ skip if no image_path was 
returned\n", + " if not img_path:\n", + " continue\n", + "\n", + " with open(img_path, \"rb\") as f:\n", + " base_file = base64.b64encode(f.read()).decode(\"utf-8\")\n", + "\n", + " image_uuid = str(uuid.uuid4())\n", + " manipulated_json[f\"Sprite {sprite_count}\"] = {\n", + " \"base64\": base_file,\n", + " \"file-path\": img_path,\n", + " \"pdf-id\": pdf_id,\n", + " \"image-uuid\": image_uuid,\n", + " }\n", + "\n", + " sprite_count += 1\n", + "\n", + " return manipulated_json\n", + " except Exception as e:\n", + " raise RuntimeError(f\"❌ Error in extract_images_from_pdf: {str(e)}\")" + ] + }, + { + "cell_type": "markdown", + "id": "688f1e1b", + "metadata": {}, + "source": [ + "## testing" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "id": "0bbad256", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Warning: No languages specified, defaulting to English.\n" + ] + } + ], + "source": [ + "project_folder= r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\test\"\n", + "output_path = extract_images_from_pdf(pdf_stream,project_folder)" + ] + }, + { + "cell_type": "code", + "execution_count": 38, + "id": "3320eff3", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'Sprite 1': {'base64': '/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCAE8AaIDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwB2sMH8c6SHJwR07HmuqVEIyY0/KuV1na3jPRiOoP8AUV14AoKIxGpP3Vx9KXy0wflXH0p+OaKTAjEaf3F/KlKKP4F/IU8CgikAzy0P8C/kKPKT+4v5Cn0UwGiNB0RfyFHlr12r+Qp1JmmAnlqeqqfwFJsTP3F/IU8UnegBPLT+4v5Uvlp/dX8hS0tADfLX+6v5CgRx/wBxc/QU7tQKAE8tf7qf980mxP7i/lS5NLSATYp6ovH+yKXy0A+4v5ClFKelICPy0J5RfyFKI0HRE/75paUUAIVH91P++aUIh6qv5UtOGMUAM8tOyL+VKEUdh+Qp1FMBpAHQL/3yKTA7hT/wEU40lFwE2of4F/IUFExzGn5U8AUHHrQgGCNO0aD8KDGn9xfypwGKWmFxgjTsi/lQUT+4v5U48UlBQgjT+4v5U7Yn9xfyoFLQAmxO6J+VV5NOtJJC720TMe5WrVFAFVdLsAB/oUGfXbQ2mWDnmzg/75qzmjNAFYaXYDpaRf8AfNH9n2Qb/j0i/wC+as5pO9FwIf7Pse1pF/3zR9htf+feL/vmrFFAEP2Cz/59os/7tAsrQdbeP/vkVLmloAgNnbH/AJYR+3y077JbYGbeHj0SpM0ZoAj+y23a3i/75FAtLb/nhHn/AHRUtJnmgBv2eHGPJix/uVm69DENAvx5UeBC3RcGtXNZmvZ/sC//AOuLUCOJsYV/s+2/65L/ACFFSWP/ACD7b/rkv8hRQK5f1wbPGWiFe55/MV2Fch4gGPF+jlSeHx9ORXYADaKAEooPSkFIBaWgUUgENFLRTsA00o6UUlMLC0UCloCwlIc0tL2oAaOaeBSAgUu4UAIaUdKSnDpUgFFA6+v40oGD2/OnYBMUAU/Bzgcn2XP60u0jlgVFFgG4pMU7+E4z7E8Cj5ScbgTjnB6UWASlGDSr97lcnsO1cv4r8a2XhiIx5WW7boueAaQ7HUFDjpxTGdEA+dR+NeD3/wARPEl5KSt0YI+yx1kTeKNalzv1GdifenYLH0cs8bHaJBn60/jPUNXzMNd1RW3G/mU+zVft/GviK2UCHVJ+Om5hj+VAWPorA7UYrxzQvivqNvME1dFnh43SKPmWvV9J1W01mzW7s5Q8LdOeQaYWLmKSnnOaaetAxKKMUvGKAEooooAKKKKAFooooAQ07tSUUAJRS0lABRRRSuAtJQTQOlMBDVHXf+Rev/8Ari1X+1UdcGdA1Af9MGoJZxFj/wAg+2/65L/IUUWIP9n23/XJf5CigDW13K+K9HbH3pMfqK6vtiuV19/+Ko0Re/mf1FdVQA00Cl60YpAKDijNJRQgFopAaXNMBKWkpaCgooooATFKOlFFAmGM0baUUtAhtOA6DPWjFHBLZ/u8AetSArlIonklZVVBliewry3xD8W5La6e30i2i
/dnBkf5g34U74l+MPKzoljJ+8A/fSA8fSvJSgxvGT65qkB0t9498RX1z5/294Qf4IuFqsPFOv8A3zqUxPuaxduQMDFSovB9utAHV2PxH8Q2yFPtCS5x8rr1/Wur8P8AxK1bVboWklhHcE8fuvlK/XrXD+HfDF14ivkggGAMEtjoK9u8P+FbDw9b5gjDXBGHnI5PtSA57xX41uNL0544ozBcN8pLHOK8Wubya+uXmuZTKzHOWru/ijKE1x4DndnJHpXABQBk/TNIoRj6dO9AGR8tA6nAH0Nadjot9exebBayMg7qpNPbcDLK4GcZppAP8I/Cr8llMkjI6MrDsRUDQsnWi6AZHwQRkt2z0rr/AAV4nl0DUgCSbVyBIhPQetckoYsBwDUyja3X6+9MR9PxyxzxrLE26NwGVvWlI5rlvh/qUmo+F4UlHMGYwfaupJoGNpMc0tFACd6Onel70HHpQAnagUvGKBQAUUbaMUAFITinCmnrQIAc0U4CkPWkxgODSmgDmlOKQDCKBSmlAqgEIwKo62P+JDqB/wCmDfyrQOMVna8dugahj/ng1BLOLsYj9gtvm/5ZL/IUUWLf8S+2/wCuS/yFFAGpr23/AIS7RPXzP6iurPSuR15c+NtHGeAcj8xXXkcUAMFLSgc0YpMBKQ9KdigjihAhg60tGKKZQo60HrRS4oASinYFGBQA2lHSlwKTmkxMWigD1pcUhBioL2dbSznnkIVI4yc1YzxWD41fy/B2ov0/dn+VCA+fr+4e7vZppWLSM5JJqrntTywZ2YjvTQQWzjirAaWIYelW4Q0hOBxx+NV8ZPPTtWjpMLSXcaLnOQMe56UMR7J8MNMNnoMtzIm2eWTCk9lHT+ddZfXaWrfMTtbj6H1pbS3W0sYbdB8qKBVDUhk5rGRrE898beG7vW7+S9gHmZHTvivOZbKeO6a08siToFP96vdcbWz3rmpvDUc+vrfHARW3Y96y9s0bKmmcDqHhm70+O3aVMvKOR6V6j4dszpugwRMAJW5II7VJf6dFqMkckgBKHr61ongBRg4XAJrOpVci1TSM240vSrqZleFWlPJxWDfeCLa43NA21j/Ce1bP9m3A1X7QHAX0rRZiAAazjUcSnTTR5nP4G1SNyRDuUdCKypdHvrWXabdj9BXsakspHHPc1XtIJBPJ5m0qemRWyxD2MnRRW+F8c0OmXccylRkbAf1/pXeGubsLn7PexQogCynoO1dHya6ISujGcbMUUUlAqiRcU2nUmKAEo70oFLgZoAKKdgUYFADaKU0lBLCiiigEFIacKMCgoZinDpTsCkA5oAaRxWfrwx4f1Enp5DVpcCs3xAM+HNSGesDD9KtbEs4qxcf2fbf9cl/kKKmsLZf7Otuf+WS/yFFMC9rJU+NdIGPT+ddaa5TV9v8AwmmkH6fzrq6kBMUtFFIApc+1JRSAKQ9KWkPSncLiDmlpBTqLhcAM0Uoo70XC4YoxS0UAGKKKKQDuMYrmvHwZvBt+EBJEZ49a6PNVtStPtmmT2rgETKfwoA+YZEHmMAcU3HAArT1Cxks7h4ZFKurHII5qqiN1xVpgNVTwCPxruPhzpJvvEMTPHuhjIdmx0x0rkIYWa4SJPmlc4UV734E0X+xdAhNwmLqZt8mRyB6UmwsdCxJ9vT6VWu7cTx8feFWCDnk9OB9KCeKykrlJ2MGaylVS2OlUvL4Oa6dl3oVINYl9bGKTjoa55xsdFOXcpgiMYxxWZrOv2WiFPPDMW6YrVK++D2Nc54o0N9Wtd0BBlXru7/SsVubsv2PiTS71Ri6SNj/C5xWiGWVPMQqyeqnIrxptHvklMYgcSA9gTXoHg6y1K1Dfbd4gI+VWGKuVkgudMcDily2ABS/KFIPLZ61b0+28y5yfuCpgrsU5JDdOs3N0J5FIC/droQcjOKYUwAFGAKcOldkFY5Ju4vUUg60vY0g61oZoWjNFFAxcUYpF706gAooozQAhGaMUtFAhCMUlKaSgAzilBzSUDrQMdR0ooPSgBpGazvEAz4c1D2hb+VaQ61na/wD8i5qP/XBv5VUSWchYk/2fbcf8sl/kKKSxP/Evtuf+WS/yFFUBoa0UXxdovqT/AFFdYK5XXQD4t0Q99+P1FdWRipAQdaWkHWlpMApvenUYpANxmjFKRRQAYpaKKACiiloAKa1OoxQA30pR0oIooHYWnK2CCBz70ylGaVwsZeq+GdJ1VvMuoFMhzyo/nXN/8KysTOzLLiM9EruQ2DR8uCTTvoMytK8LaRpiLJFaI0i8EsM4rXOAwC8jtVK81Wz09lNzOEJ6UWurWN9Lst7hGb0zzS5kU4u1y4eTSEc0pKhiScY96jadCfvD86HF7mXtI2vckqGeBJYzkc9qkEkbLw4z9aXIGMHcT3qJLmKhUTMC5gKdRVbbXRzW8cvEmCfaqUml5PyGueVN3OqNRbGP5ag58rPvilDYOOgrS/siTOc1Yh0pMZl+Y0Km+o3VRn21qZHyRxWzBCsKkAc06OFY8BBipSDW9OmkYVJ32Gc04dKKcBxWpmJR3p2BSYoGLSYo5pRQAgFKelFB6UAIKD1ooxQACloooAKKKKACiik70ALSZ7UtGKAEAwaz9f8A+Rf1D/ri1aVZ+vD/AIp/UeM/uG4qo7ks4ywx/Z9t/wBcl/kKKr2LN/Z9t/1yX+QoqgNrXSD4r0Uj+/n9RXV9q5PXM/8ACXaKMD739RXWVICAUtFGRSAKKKKQBS44pKcOlADcUUtJQAClpBS0AFFFFAIKTFLSEgUMYUmTnpSO6ou41mXepH7sdZSqcppGncvTXkUPBcZ9KptqwOdq5rIkcu25jnNMyQOKwlXbdjpjSSRQ8VTvebCyrgVz9tM8VwCp28fw8Vu6urPDmuaJK3K5HHc06TblqbV7KhI1F1C7wc3EhH1o+23B/wCWz/nVYjHGMUgr2Ekoo/P5VakZvUsi+uV/5bP+dWoNevocBZenTPNZpNA6Zo9mhxxFZdTp4PGU8eBPGH9xxW3b+J7KbAZtjHsa86J+cc07JKZzUSop6nVTx9WOkj12N0lUMjZUjOQaUElMr3rzzSvEt1YFUdQ8I4x3rsbDXbK+j+QiNj2JrCVOx61DGQqaLc0Dk4GelKc0qjeARz70rL2qLWOxajM0ob2oxt60Y74oAdRSCloAKKKKACiiigAooozQMKKKD0oAQnFKORSDnrS0AFJjmlooAKCcUUhoADkjiqOvBv7A1DB/5YN/Kr4xjms/XyBoWocnHkNTiSzhrFj/AGfbcf8ALJf5CimWGP7Ptv8Arkv8hRVAbmsn/istGH+3/UV1YGK5TWjnxlpHHIOf1FdcRSAaTTM89KkIxSUgEpaB1oNIBDThSUUAOpvenCjAoAbR3pSKMUALTT1p1JjmkwE6A1HI6om5qe/TrisrUbjgIGFZ1JWNKcbshvr7zAY0yPeqCggcnJpzNkUg5HSuOU22dcYJDWGRSFcrTwpY4AyatRadNKATlaEpMtuKMTVUb7ISOwrkZkZkDZ5zXpt/pBfT3C/M4FcKmkXckywmF+W5OK2pxfNqXKUJ
UpR8iHBABznIoXOa6O+8MS2dmZ+iheSa54Yxwa9qOsT8/rwcKrixD1pRyKQinDpVmQxkJxRggVJk0maTJbGDNMlklT5o2aMjuO9SkYIpswG3rWclcunNqV0bOkeMLuzXZcgzRjpjqK6nTvFmn6g2M+Uw67jXmi8NxUcxcQsI2Ks3GRXNV909bCYmpKoo3PZoby3uM+VMrkdgalL5HNeW6BPNb3sJjclTwwzXpqSxuBtYFu4rBVLn0VWjybE4paap9aNwziquc1h1JTuPSkouOwlFDHAJpN2MZ4NFwsOxTcUpJHOaQHNAxQKWiimAUhoooAQdaWlwKKACg9KBTsUAMGcjFUNfGfDuo5/54N/KtHAzWfrwH/CP6h/1xagDgrEf8S+2/wCuS/yFFTWKD+z7bn/lkv8AIUUAaWsOf+E40lfXH867LGSa47VgD450r6A/rXYiqJEIpMUp60HpSYCAUh604UuKQDKKdijFACDrTiKaOtONAABRijtS0AJikx706mNwD69aTY0rkNzKIoiT3rnpHLyFiBg1f1WUmUIDxiqG35a5KjbZ1U42GnBFKil2CrQiFnwOa2rGxCLvcYJ7GojBtlTlYSzs0UbmGTV7PQYpQABgU4DvXZGKSOaUm2Js+bPb0pBFGMthfyp9NI+V/YZqrak8/Lqct4zvnigis0f7+SwrjEwuABxWn4gvPtmryPu4AwBWcBivQpqyPl8bU56zbAjNNIOafmjIqzjGUuM0pINLxigLC8EDPFRSpuUkVKMdxmkkYBDxipaFHcogdaY+5XRV5IOakyNxBznrRDtecn06Vw4mVz2copOpVuzY0WJ2u9+0bQORXTpO8bblJFZeiw7LVpD1NX+2K867WzPsKlm9jUh1YoQHGffNXRqcDY+YA1z2KcFUdaFWa0OZ0U3odOlzC4yJFp4lQj74rl9+3gHH404SPj75q/bi9ib13ew28e5mHsKzF1IXF7ktiMDpVCVGl4Yk/WmwxbWJ4FHt77C9kjf+2o7BY2xj1q1HJvP9awLWN2n+UZrdhhxjdkHvXTBtrUxmknoWSMAUlGAOhoqyA20Yp1N70AFFFFAAKXNIKdigBOprN1/nw7qOB/ywatI8Vna78vh/UCf+eLUCOLsP+Qdbcf8ALJP5Cin2I/4l9tx/yyX+QooFc0dZ2nxxpAHB2c/nXYEYOK5DVdp8e6WGXnZ/Wuw4A96oBhpO9KaTvUgOooooAQijFLRzQAuKKM0UAIaTvS4yaOg4oAXFRTsiR5Jwe/0qTJA61m6nMRhR0IwazqPQ0gtTKkJaZ2JyM8fSmHJ4FKckZNW7GFZpcEcCue3MzpTUUTaZbc75B9K18jIA6U1ECDGOKdjniumEUkc85XAj5qCecUvNM5zVN2M7jwKoa3ciz0uWTdhiMCtDPSuJ8a3padLNWONuSK1pq7OfFzUKbRypDM5duSTmnZyKXrj0x0pMCuxHysrthRS44pQBtqhDD1pR0pcDFOwMUrgIB71PbgNOqlcg1DipIXKyAg4ORSew4L31c9B/4R3T7m3iLwgMU6iuVvvDkyXpjtoCEz96u7tWzZw55OwVIzc1501zM+uwahSSlFHNDTpre3jRFyAOaa1rOqlih5rpVUYx2NDIrDBwRXO8Mmdv1iVzmFiYqdymm7SAcg10wgi/uCl+ywt1jWp+r2B12zmDGxAP9KesbMPunjvXTfZYsY8taBDGuQEFV7BC9uzDisZZQMd/WtC20yJD+95NXgoA4FOXitFRijN1GxqRpGNqIAPWjGDTsDNNY4NapWRD1Y4Up6U0GnDpQJsP4aYM7jTjQBQADrS0UHpQAUmPel7UmKAuGOKpa4B/YGo7+nkN/KrpHeqXiBS/h3UeePs7fyoEefWUkv2C2wePKX+QopLAn+zrX/rkn8hRQB0GrYPj7S17iPr+NdcRya5HUVJ+IWn45xH/AFrrx61TASkxS96KkBMUu2inDpQAgFFLSd6ADFJTqMUANAzS4paKAI3ICEnjFYl2DJJvKnb2ralTIIPeqVxCW2oOgqWrotOxkxpvfaBV7SQTcS4GNvFWYrJVJPTFLY2n2eSV+fmNZxiXKRexxQPpR2oFaIhikU3GSBUlNIqmiOomOFJ75J9q8w167W71meUDgHavNega3fLp+ntIMksCF968vPzOWPc5rekjxs0qu9kKre1L1pM05a6UeNbqGOMUgOBikYkU0ZzmqAkpegpqtmlzSYBup8fX8RUXU0oyJAO1J7FR+JHqthzZxf7gqwwBqppR36dCc/wireOa897n1lD+GhAoBo20veig2ExilBpDRTsA4ZPejFApaAE6UClo4pAFIee1LSZzxQAYFL0pNtLtoJYHmiiigEFBHFApaBidsUUUUCA9Koa5xoGo5OB5DZ/Kr4qh4g40HUMj5fIbP5UAcJYxp/Z9t+8H+qXt7CioLFl+wW2Bx5S/yFFAG/qLGP4iaePWIfzNdn/DXF6od3xF032iH8zXZ5NUwCmmnUnepAbzRk0+jAzQA1TmnUuAMUEUAJRS03nd7UAKKWiigBppuxc5xzTjSigEJjihRTjSigYlAoNAoEIetHTmlxzRwGGelHUL2TOO8Z3XzQWyN9zkiuRHTpWt4iuBcavIV6jjNZY4FdtONkfLYypzVGhuAe1Lg0N1FOB45FWchGU96AOKc2O1A6VQDCOcCgginjrmhjk0mF2IF70nO6nfw9aOgGT1NBpG90ek+HJPM0iM1qDrWP4YIOixgHPJzWw3FcE9z6rD/wAFCHrSUUVJ0BjNGKKM0AKKWk7Uo6UAB6UgoNAoAWlx3pKXtQAZo5pDx0pATmglgaUdKO/SnAUAhopaUgUnegYd6CKO9BzQIUAbeaoa4C2hagPWBv5Ve5NVda40LUD38hv5UAeW2UjCwtxj/lkv8hRT7L/jwt/+uS/yooA6XUMD4j2I64iH867PFcZc4k+JdtkdIv612nSqYCUhHeloPSpABzS00U6gAoNFFAB2zRn2pf4aSgAJozRS4FADTzQKdgUnegANKKDSUABoFBoHWgBRVHVrkWlm7k8kHFXxVHUtNGpxiEtjmi9tSJqUlyx3PM7gl5pHOck5zTME80utCWy1RoLdlZF4z2qmlxMOXIyeuK0WKS0PPeQV6kucskgDrzSFuOSBUDzMRkYqCUGReWxR9bRrHhus92W9wB5Ip4Ixyw/Osp4iT99ht5NPAAHeoeO7I64cMLaUzRZ0UcuKi82MDJequcr2pcfLUfXjpjwrT6zJzdwjjJq1bKt5NDDD8zM2MVmsBwamgmME8cinBUggin9dbNHwzRir81z1jRbFtP09YXHzZyauscmo7Z2ksbd2ySyA5qXtmqbUtTk5FR9wSkzzS9aAtIsKKdijFAB2xQBxRS0AFFFFACUvQUufakPSgBKUCkpwoEGDnrRmlptABgk9aXGKB3ozxQMTHNOpnOaeOlArBwBVLXP+QFqH/XBqu/WqetYOiX/H/LFv5UCPMbMYsbfn/lkv8qKbaN/ocH/XNf5UUAdTMrf8LIiP/TH/ABrsD1rjizSfExFzwIf
8a7EGqYBQelFB6VICClpO1KOlAAKXvSCloAdTe9GaKACilFBHFACUUdqKAFFKelIKU9KAG04dKb2oBoAVs4qC7l8m1klzghTVjIAz71znjK8ms9DCR8NLJtyOoFRUdkb4eN5o86u5POuZHIJy55qMDApAGBwc9c0p9q4r3Z9VBcsVFgaYwJHHWnnOKacHg0mUtyaztGu7yK3zlpZAhxXWap4MitdPluUkz5a7sVm+DbbzfEEbBcrH8xPvXoN2qzafMjrnchGK3px9y55tfEuNblseOjaBxx3pBmnzrsmeM/eU4qNS2eelYu56EXzIcelN2kocHk9KfSYxg0k9dQnCLTR6n4Xu2vNDhZjnb8tbBrmPBEgOh7c8iQ/0rpt3au6Gq0PlsTDlqNCigmlpvU4qzC44HIopADmn4oC4mKKdTe9AXDvSnqKSigLjqb3o3H0o70DHLTuKaKdjigAP3aaKKKAAd6Q0tGKAAU6kwKWgBD0qnrH/ACBb/wD64tVxulU9ZGdDvz6QN/Kglnltpj7FB/1zX+VFR2jf6FB/1zX+VFAHVtlfiWpA4EHP612WK41SzfEt/RYOfbrXZBgeRVAAFKelFB6VICDkYpegpBSmgABpaRe9LQAd6KO9FAADilzSUUAKBkYpMU4UYOTxQAYxQelKQcA44PSkBB6c0AJjjFN2nNPpQM0AM71la3ozavAI/NC7Rxkd618Y74NZGv6x/Y0KMEDlzgH3pNc2g41vZPmOD1fw7Ppsu3Jl46qMVkeWycMpB9DXXf8ACRDU5cSqFNWDZwTR5MYZT3ArjnCzPdwua06sPeOHKtyR0pp6cj6V1Unh2GRt6Myr3qay8GNPNvMuYwc4qErnTLGU0jS8Haa9rpslwy4kk6H0FdMQpGDyCaSCFba3SJfuoMYpxKsMYwAc12RVlY8GpWcqvMeU+I7M2es3C7dpZt2PY1l8kZA4r1i/0Gy1KUyzKTIwwTXP33hS1g5QnBrGdOzuexRx0IxSkcN+NNG4yDHIHOK63/hHbbGdzU3/AIR62353sMVju7HQsZT3Re8AyHF3E3TIYD0612u0k5A4FYnhzTbTT4mlgYs0mA2fb/8AXW8TgY9T0rtpbHz2KqKdW6G4pMY5p1ITxVnOIo5p46imrSjqKAH4puOafmm0AIFzRinA0jUANORSDk0o560vAoGOxikLY4xSZ5GKUigAoAzRSigBDxSZp2MmjFABmlptOHSgAqnrJxod+cceS38quDrVLWv+QFqH/XFv5UCZ5Naj/Q4Of+Wa/wAqKdaQyGzgIU/6te3tRQB1Wm7n+JN6TyFh/wAa7QLXHaQw/wCFh6ngcCPH867IHK1TAKD0pB1pT0qQEBpaQCloAM4ozRSYoAdRTQeacaACjvR2pRQA4VzvjDxRD4XsEl8oyXEpwi564rolBJA6fNyfavHvH0s3iXxTb6bY7pPswAbbzgk8mmiZS5Vc7Xwx44tdeR2ltzbye54rqIriORV2yI2B1BryW3006RAlsVZHC/NVm01C6tSfIkcKPU1ooXPNlmCjK1j1XeD3FA27h6/WvOY/E18h+Zt1LJ4ovWBC8Zo9kV/aVOx311eW9upMsiqPrXB61rK6rdfu1/dIehrJubue7OZ5C3oM9KYhI71pCkjz8TjXUVo7DJIwWLJlT7VbsdVksl2yBnWoCxFHVeQKToxe5z0cRVpu6Omsb6C8Kpu2MemTXX28Zit1Xj6jvXnNnoN/dbZbcFeetegafDPb2KRznMg6k1zumk9D6HDYmpWj75Z780p2+lJQamx1jXywIUfN2NcPrOq6nbXTwSr8g+6cV3XUYqpfWCXts8ZUFyMA4ppX0OfEKfLeJ5sdZuiMbsc0x9VujIeeOmabc25huZY3/hbFRbRit44eO54VTH14vluaWmavfRXCRKSVY9K9JgZjCjSD5iK8tsJ1tr2KUjhD0r1GJhNEJl6MBgegpVIcux6GXVp1d2SdaaacKMZrI9MF6Ud6OlHegB1FFFAB3oPWil4xzQAlGKD7Uo6UAAGKU9KKKAG04dKMCigBDRzS0UAJ3paMUUAJ0INUtc40LUj/ANMG/lV7Gaoa6R/YOog9Ps7fyoA86sWH9n23/XJf5Cio7Ap/Z1rz/wAsU/kKKAOg0JC3j3Vmxxt/xrs/4a47w9vbxzq5xhQoz+tdh/DVMAHWlPSkHWlPSpAB0zRQOlGaACjFITRmgAAwacTmkooAKXtikzQTxQBW1KeaLT5miQtIQdoBrkfAHhy4srq81XUUIuJW4Dd/eu4CrwTz7U47d4J6AdBRewXMfXNBj1KMFPlmA+961x974fvbJRuQsAeor0kcgg4prRo67WUMD61tCbOCtgoz1SPInV0chlwacoPpXo9z4fsronKbSe9Z0vg6M/6uXArT2iPKngJ3sjh3QlgQKeFOOOtdkngxQwLzbhV228K2UDbny3tT9oif7OqdDiINPubogRROxPtxXTaP4UYYlvOMc7a6uGGG3UJEAF9MU8sNx649KxlUb2PRoYBQV5ISGOOKEJEu1V6UNknk0u7v2pM1melCKSshp4pCaU000i7juooBO4nOOMUg6UoOKPMN0cH4usTbaiJ9uI2HPua51RwBmvStfsPt2nspG4qMjFecNH5blTwQeldVN3PnMdT5JXGuvHXHHWvUNGmSbS7dkbP7vBry9znAIOO9d34Nl36VIMHKtjmisuxeVzSm0dFSZ5paTua5T6AM04UzvTx0oAWlxSZpw6UAIOKRhTqQc9aAGjrin0w9eKevagBcYoNLQ2KAG0tJRQAGiiigAoooDc0AHI5qjrYDaFqOehgb+VXyflrP1z/kAaj/ANcG/lQB5xYJGNPth6RL/IUVHYg/YLbn/lkv8hRQB0vhjMnjPXGzwCP612DLXHeFRjxZrp7bh/Wuz71TAYBiloPWg9KkApvejNLigAFLRRQAUUUUAJjNGcClpMCgABp1NwKM9qAHZxQDSdOvNGfakEW0PzSg1HSg1Q9B+aCeKaaT8aRItLTaM0Du3uOpv8VKKb3oEKaKKXtQMSjjv0pvOaU5oYIcQfKO0Ak9vavO/Ett9m1lm2YR+a9DUnOe9YniXTRe2JeNAZU5H0rSnKzOHHUVOFzz/blMZ5zmus8Fy/PNb56jdXJfxkdxwa6DwlL5eu7RgBkxmumorxPFwb5Kx33SmkZOc04DcTgg49KMYriPqBhXmnA9qTPNGaAHcU4dKQAY6UtABnFJjNLiigBAOadjFJRnmgBc5o70nFGaAFNNHWlpKAAdaWgUtAC/w0zHenUUANJ4qlrhzoGocf8ALBv5Vexiqeuf8gHUMf8APBv5UAeZ2A/4l9t/1yX+Qop1jj7Bbc/8sl/kKKAOj8KYfxDrjg9XA/nXYdOa4zwKxk1PW5MjiWuzJyKpgNpCaWkPWpAAM0uKQU6gBMUUtIelAAOaKQGjAz1oAUnFJmkPFFADqMUUUAJz60c+tLRmgAoozRQAvWg8GgUHrQAUUUZ7UAFB60YwaU9aAEzS4zRil6CgBOlHWg80negBcUhA2McZOP0p1Ifu46
Zzx600JxjJWkeVeL5rXQdceBw22RA6lRkc5zWRb+M7TT7qKe2hkd16ivTNf8F2HiaeGe7leJo1wAlZa/CvQo2yJZie/wDnFW6jascawVNS5kRaT8UtIvcR3Qa1fvursbO+t9RthPavviIyG9RWHb+A9GtiCkQbHdxmt+2tUsrfyYEVYxxgVmdpLwf8aMUgGDTqAANgdKcORSYpaACiiigApv8AFTqTPOKADHvRilxRQAYpaMiigAooxmkx70ALRRR2oATPHSqmtAf2BqGT/wAsG/lVwAnoaz/EAP8Awjup45P2dun0oA80spB9gt/lP+qXv7CirthaD+zrXI58lP5CigDZ8A26/ZtSmVsvJOeK7I8jIriPhmu5b/LMcS8c13DDaMCqYDCKbjnrT/vdaTAMgXA5NKwCUuT6U5ogD95vzpyxjb1NAEec0h6VKyhVXvn1pMAsBikBEBRjmpmUL0pqoGOcmiwEZHFOI+UVI6BQMZpmeKLANpe1MLU4HIosAEZApCODQzEVHJMwQkAcUWAevvTqoi7kz0X8qY97KpOAv5UAaR6ClHSshdUnJIKx/lTjqcwQnbH+R/xoA1KO9YT6xc9hGPwP+NRvrt0owEi+u0/40AdFS9q5G48SX0TJtWHnOcqf8azLzxpqkMnyLbgf7h/xoA7/ACuaXG76V5gPHms/aAP9Hx6bD/jW3a+L9RkTcyW+T/sH/GgDtQAOKRs7hiuLPi7UMn93bf8AfB/xo/4S7UcH93bf98H/ABoA7TmkINcXJ4v1EbcR2/8A3wf8aD4w1Ldjy7b/AL4P+NAHbopNSCM4zmuG/wCEw1Hn93bf98H/ABqRfGGpEAeXbf8AfB/xoA7TZSYrjT4u1H+5b/8AfB/xpo8WaiT9yD/vg/40AdmRRiuQTxVfnqlv/wB8H/GlbxTfhSQkH/fB/wAaAOvxRiuOfxXqAQEJb/8AfB/xp6+Kr8qCUt/++D/jQB1pGKUZx0rkx4ovyw+SD/vg/wCNTnxDelc7Yh/wE/40AdLRiudXXLsrnbF/3yf8al/ty6WIHZCTjup/xoA3cUmOa5oeJL1mI8uAY9FP+NTR69du6qUh5PZT/jQBv45p46VjnV5x8vlw/Xac/wA6cNTnCA7YyfpQBqnrTR1NYrazchwNkX/fJ/xqc6rMImYRxZx/dP8AjQBqUoHFYMGt3LFtyQn/AICf8asLq05AOyL/AL5P+NAGtgjms7xASnhzVH/6d2/lUcusXCpxHF/3yf8AGsXxBrV1J4b1FCseGgYH5T6fWgDKsDnTrU+sSfyFFVdOuH/sy04X/Up2/wBkUUAf/9k=',\n", + " 'file-path': 'D:\\\\DEV PATEL\\\\2025\\\\scratch_VLM\\\\scratch_agent\\\\blocks\\\\out_json\\\\test\\\\figure-1-1.jpg',\n", + " 'pdf-id': '9f36b3cde65f4356b896f17bac0a83c5',\n", + " 'image-uuid': '5da9f6be-547c-472e-a083-baacd34a87f8'},\n", + " 'Sprite 2': {'base64': '/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCAFAAZkDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwC0vhbXNp/4qaYf9s//AK9DeEtaPJ8TT/8AfuutJxSfjQzQ5IeE9X/i8TXH/funDwlqvbxLcH/gFdXmjNAHJN4O1Y9fElx/3zSf8Idquf8AkZLj/vmuuzRuzxTuByreEdRKj/ipLjP+7TD4M1QsCPEtx/3zXWjApSaLgckfBuqj/mZLj/vmkXwVqgOT4luP++a63NLkUXA5NvBepN08S3H/AHzTR4P1NeB4luCf92uu3AUm6gDk/wDhENV/6GS4/wC+aZ/wh2p/9DJcn/gNdhvoLUgORXwfqYGP+Egu/wABig+DtQI/5GW8H4A11m7NLnigDkB4O1QKdvia4/FaUeCdRPJ8S3P/AHzXYFgVxTc0Acmvgy/x/wAjJdH/AIDTv+ELvxz/AMJFct7ba6ndg0pfigDkm8FX5f8A5GG4X8Keng6/x/yMd5+ldWGoJFAHJHwbfk/8jFeflmlHg2+B/wCRivPyxXVhsUu+gDkv+EOvs/8AIxXv6Ug8GX55PiO7/wC+a67dTS2aAOUHg/UOg8SXmPpil/4Q29I/5GG9z9RXU5xSg807gcoPB14Dz4hvfzH+FL/wh95u/wCRgu/++hXVdTSZouByo8G3hz/xUN51/vD/AApf+EMvB/zMl5/30K6nil4FFwOUbwZdjn/hIL0/8CH+FH/CHXg6+Ib38x/hXV7hRkUXA5RvBt42f+KgvMHj1pF8G3QAA1y7IHfFdZketBYAdaAOVHg26bj+3br8qQ+Db7tr92PxFdWHp2RSA5IeDL7o3iG7P4ik/wCEPvf+g/ecf7QrrSwBpNwoA5NfB93nnX7z/voUN4Luj08Q3ddXmk3UAckfA1w33vEF4fxoPgaftr95x711wYEUvBFAHG/8ILck8a/d/nR/wgtynH/CQ3ea7HdjpSZoA5H/AIQW7xg6/dH8aafAMxBDa7dg+oNdiDxSE0AcWPh7MQca/e/nT4/AU+5Y28R3ibjXY5oB3ZHuDSYHlqi70Lxvb6e+pz3EbSoMP3ycV6l5p/uGvMvES7PibaHGd0sZ/WvUuP7tFgGNzTCRT2600rTe4CA5p1NAxTqACiikzQAuaXOaSjpQAGm496UmloAQD3oxS0GgBvelxSAHN
OoAb0NOBpDikGc0AOpM0tNxQAUZpdtJjFABQaKMUAA4oooHWgBcUg4pc0YzQAlGaKKAFUmkpQKSgBDRS4zRg0AJSYp2DSYxQAmKMUtFABRRjNFABRRRQAZpMUtFADSKUZpaMelABmiiigAzRSZozQAtKDhuPSkHNC/e/Sl1A838U/L8StN/3ov/AEKvSfOavNvFhH/CyNNx/ei/9Cr0PJ9DVMC245FMNSv1NREUdQEFLSDiloAKbTqKQCClpQKcBQAzFFS00igBlIelLRQACiiigBrdaUUtFABQaD0pBQA7tTTTj0pvegBKU0hxSgigAPWkpT1pe1ADacOlFBOBQAxjSKaQtmlTrmgCQU2l4pc0ANzzTwKaRk0ucUAOxSEUbqTNADcYoNKetJQAopKU4xSUAFBoooASiloBzQAlKOtLTe9ACmkPSlpDQA2ilFB6UAC9acPvAf7VIKUcnHvQB5v4uXb8S9Lx1zF/6Ga9Dy/pXnnjL5fiDpb/APXMf+PGvQM02Bfk6moiKkkByP1qM0uoCClpO9O7UAJRRjNLg0wFXpUmOKjHFOBpAKaQmg80hNADT1pKU9aQdaAFxSGnDmkNABimmnHpSY5pgHXrRikJoyPWgBe1MfOOKd1PFJIcJTsBCWagE0hJNANOyAkVucGpahUDPNTCiyADUTydqkJquw5osgDPHFTRnIqHtUsfSiwElIRS0gzUgA6UhFOooJGUozTsYpo60ABpp6U/IoI4oAYRxSjNOxkUxiQKChc0o6ZqDce9TfwUrAKeBUTSYqXPFQv16UWAVZMU8MDUOM806MHfRYCYc0EZpRxSUwExQRS0EUgEHFKmd2R60lPGCfxpoDzjxvH/AMVzpQ/2oz/49Xe4NcF42JHxA0oDkfu//QjXfc0wL0nPWmEcVI4yKiFTbUVxp4NL2prNg03ec07BcmFFRiRqVWZqqwx2aM07bxk1GXxU2AUZzTqiEnNSBt1FgFxmjFBAApF60WAOhoNRN941KDRYBaa+dvFOPJoxmnYVyqQ2aAMdTTn4NIBinYLkqUSDIpqkdaViDzRYLkLHFA+Uc0vU0uDSuAiNg81NvAFRClphcUyCmGlNHWlcLjaljqMjipYhxTC4+inU1yEGTSSE5BR1qAXUJapRIjcqaLE8yGyEgVHk+tSyZKHBqLqMmnYOZACalGcVCGBPFTIaLDuPFRyU/rUcgosUndEZ7VYHKCq+OanQ/LQAuKgfrVioXoAYOlOQ4amY5p6DDUMLk680YoFGam4XEPApm7nmpOpqvJ96kFx4Zc1IvX8arVYhPNNBc898btjxzpQxzuj/APQq7zFcL47Xd4x0g+6f+hV3e2mFyzO+0gCmBjTrpcMKjpCFfpmoiakY5FRjk4pgKDUkYz3qFuKliIHWmMlHpUcnBqQMN2Kjl60rjGAVIFIqMU8yZFACsRinFwq1CSSKCSVxQAjOM5pwcUzGe1AxQBOrZpScVGh9KRmz0pksQnmkNIM5pTTENxkUAN0peRSbsj72Pei1wbsO6Hikw27GKo3WrWtqNrSBn9uarWl4dRYsrFAO1E4tRuQqkW7JmxjHVhSZGetUw6YI3EfWk3qOd9cqql3LvHrQAOxqn5gzjdUkRO/aSc1SnzAWTmpI/u0ztipF4WtEwYu8d6huH/dN7UpxmoZT+6ariZyZnE7uacrsvc0z2pcetaqJzt6k63L4xnimrO2etRArml4o5RcxZW4GRuH5VPDcxuyqD8xzxWcWwR6U63dRdRkDpnNJo0jM2VNNlI5IBwD1NPDgKOKztXc/ZyPKdgGA3B8fpWZ1R2LG7k49akRsdarw5CKGz071Ln0oGSGSmE5phY+lO6UwYlPU/Nim+9Kn36BE2aQnHWk3CmuQRU2AeHWoXIJpOlAxRYBKmh+9TKfC2W54oA4Lx6dni7Rvcr/6FXcbq4n4goo8V6L9V/8AQq7XbQBcuVJeq+as3HYVXNACZpBwc0EgHLdBVSe+gtydxyWPQVSjchzsWzzQARUEV3FOAEILehqwH2jBxgdKdrblRmpbD4zlqWXrUcRBbP8AWpZcYzn8zUNovlZHRTN4HQj86DInYj86SaDkY/tQKbvHcrRvB4BH4GndByscOtNwPWl/P86QEMSoI4pXCzHx9aae9KrY+aoXmVCWcgAc81SCxLSZrLg1cT3xh2KoPG4mtEHmgiwrOADnsM1zWoaneTStDEVWLuR1ro5RvhYZIODXICJ5ZGLsFRSea6KUdDgxVSUdCuYhuyFLHuTWnpT/AGfc8o2imxRGRNsafe6tUi26xIc4k+hrWsk4WOfDKUpmh5sTfMGBBpDJHjlhWaZoUOGxTxPDtyANteX7NHufVpbmgs6YyCKmt7hWnHzVkpcQMcFatQRI7gYAxznNLlsTKk4rU3dwY5BzUg6VkLczwSdQ0ZOMCr0N3FJ8oOGHQVaMU7kjA5qGYDy2qwBnkCobj/VNWsBT2M4CkJ5xSqeKOproS0OOTGjrTqMCkOabRCYHrSxLiYGjvzSxnMygVLNYvU1DnC1S1Jh9nCbjvJzirpB4B6Cudv5PNvjJn5ANtYS3PSpRujcidXhQg5OOaeBzWZo7ko0THheRWmvAHqaXUcoWA04HAyaKguphHCSDhquxDEkvIo2Ks2DjOKrR6nHJLsUfjWPKHeQsxznvV2yt4ov3hf5vSqSMlzXNoSKSPekySaptMhG/PSpbeZJRndUtGhYAxRTQ+e2T6012IfCtSSJuTCnx8sMVQLujcMCPer0OOGpNWA4r4gL/AMVPop91/wDQq7PNcX8Q9v8Awkmieu5T+tdluakWi5cH5uag79OKXULlYCoOMnpVE6lEI/unJpOxDHzSEbsjoOK5KUTtfNKxYDd+FdMNQhIOR9aa9zbOgXYoBOc4raMkuphOLZjXkEzXERtQ+cckVo7Lr7FAk24PnnFXBfQAcKMj0pDfwvjcM+lTVnGS0KoJxepSaCcSEktjPrQYbnJPO0VvZhFusmzJNJHJE77XjxmuXkZ6HtY9jDWKX/a/OmmGYjIzW7K8aNtWLJqWNYZICxTBFHIw9rHsc6Ibgno2PrSi3uGPQ4+tbW9D/BgD3p8oiQDAOTT5ZB7WPYwDDcY4Vs/Wrlgkkd1L5m7gDGa17cRSgqUwaryJtnfnrVxT6kuaa0QuXxgAVS1EblVXAwetXd4BIyM+map6mC9rkfeGKu9hQTe5RMcDfMqAFelbMIyg+lYcGDJGxIHsa3Y0AGMiiLuKdNop6jPLDFiIZz1rEjEQVv4m5JB9a3r0AR4zz05rDSLDSZHOetdVJ6HlYmLvqRJNK9q6gbM+lSwqPIO3PvUkcDSQyDoPWlSPyoSKdV6Bgv4hVaJcBggOTzmnFEDABB9BSNNCpAaUKc9KaJo2DNuwB3rhZ9QpaWJDGpzhAKtW/RunSs9bmIsf3o56ZrTscEsu09KiRz4he6TxFFgbHLVGXUhWb5WHcVMY1WNh0qARq2A1LU8m1rk8FxMHxyV9quz/AOpb6VFarhgMcVNP/q2Fb0yJPQzAeKTJzxS4x1pwArqWxyPcQUhP
NPIpm3mgkCQKSI5nXHXNKwFNjAWZSfWk9jSG5e1G4+y2bFj8zD5RXNsQ5LEnPVhmukaCK8nzKudg4BNV2TR15aVBk4OGrknKzPbw9uUy7WcRT7yxwTk4Paujjbegdfunoappa6YWVYRvDcAircMKwKVGdoPFCeo6hKRxWffLvkBrQ6ckjFUbwt5wUDqK2Tuc1igVXGcDFPLDghRgUFSQQRiiNDtINPUNtBWOUGAOantcdNuKgIKge1V7m4eJvlOKaTbIqTUFdmzAhEjHtVDUxKLKd43EbqM7j2rMGoXC5w5/Co5Ll542jkZirDketa+xZxTxcXsP0Kee6ty0oLD+8f4vpXVRKQioeDXHRu8MflRMVjA+UAdDV+wv7lrqGJ5sgn0qZ0XYmGKu7GZ8RB/xPdFAHzBl5/Gus3PXM/EYFdV0STjPmAfhkV1n4VyvQ9FNNXJdSiiMiFxng1gS6jZwv5bQNxXQajw6/wC7XNSx/vM4B5rx61aSkdtOjGSuyzcXFpDbLP8AZ2w1Nt7+0upFRICpPGTRcqDbRggEA5qGJCZ0+UDkEYrH6xM3WHh2JZr21guJI2iLeWMnFW7IW97GJkjwtULlC12TjgnFX9NGExnHNaUq0nImrRio6GvOI0t48dKr5UzLtOKsvFHLABIcrUMUcCNlYzwOtetFux57iQls3JBNTj5bN2BoP2aXMjDbimmRPLMRiO1u9VcOUryyAQgj0qWbIMbf7NP8qxEWWbdtHTPelkZGjR/KOKLhyhaHdI2axdavr21vytsVPHet+2SHaWQYHesTV4R/aBc9cVLdkdFCCbMWTV9Zf7rQLjuf/wBdDX+qTKAzo/HO2r6WO+RSTgdaasKtIEzggkVn7Q71CKMlrnUUI5QY9asHV9ZIBjMYFaL2oXIk5I6U2O0WRORilzhyLdlI6jq868tEVHU1dsJNtvJLPhm9BTnskjj3DvxRbW4hSTHO6umjNnm5hSjy3SIvtEkkZ2LtBqRMLan5t0npQsDbGxTzG0cBDLiuqpqjysHBqoUXjViuVTJ9etDRxxgrtAz+VKzR4+Yg49aQkOmQFKj3rjS1sfSX0BIFJPyKcVo2xxk5I4qS1VDArBADU3Hda0VK55dfE68pEZRHEznLe1LFIk8YONh9Ke2NuStNCr0UdafsfM4eYsxMiMPmp8sy7H+btVLPOMUOxC/dFaRhYUpaEaOMHJzzUiN81RODtDYxT0PygmtDBj256UzNOU80hpksTIzzSLtMq845o27hSgbJF+tS9i47mkgO9sjIx1qjFbROm1ogFz1xWih/euB12VSgJEB3dQa8+qrs9qj7sUSxxKk8YUAAHjFTLIoEjOwVQepqOHLXEZC55Ga5fxTqzB5NOgG0B8sa2w0HN2MsXVVON2aeseJItPaOOILIzVDba1HqbfKhDKtcVtaRxubKL3Nbnhnb9pm+Y9K9CdHljc8mjjHOpynQjJiU5pAT2p2AVxnIoUBR7VznsysKM96pX336uDbtqtdAM3FaQ0Zy4v4CnBA9xLsVsVdTSZP4mqtamRLltjKAFJ5q2s91JEzeZH8q1pKTueNBLqMfTT5gQSYqxZ6ZKtxDJuHBqL7VMkqj92WJFWbe8uHZCyxgM2KzlJtGsFFSMT4lFjquiDI/1g6fWuu2muR+JSuL7RWYAruHT1zXW5HpXHLc9eK0J9Q4Zc+mK5s39sJnR4z8jfMa6TURkjNc44BZgUGd2c4614OI+M9Si/dJ57iCKzSdkZkfooHNRWuoW73CRiGRSemRUsx/0SEFeMce1QIN1xHxgBxlj25rA6USXN9Asu1o33Bu1WbOaOWEvGpABxg1XvFBvZQACuMg1PaZ8vjAOelbUPiIrfCbUg/dLk44pmcAYNSy4MSHrxUXAXIHNe1HY8/lIp9oZWYcHsKqPK7sZAx+XoO1WpGGU4zmmCBM9evUU2NRK7x+ZNCyDCt98e9aGSRtB4HaqzuEu4kGApHFTdqWo+UfAw88oPTNZWqP/pjHHQVet2J1Aj+ELVDU8/a3+lTJ6GtJWZCpc7WXI4piAmUZBPNKHZVQetNLsjBgT19Kwudg9ly0ineGx3NIuRuHPA4oWQs7k5LHpxQuSTgk+uKLh0HsWMajtUluuSQSOoqB2JAHI/Co4pY7ZzLNIdgPOK6KL7HLiIqSVzRuPLsYWndspnsKRla7txImAp9ab/a2m3BWFLuMKDlhIMVSvdYgjfybaVXYf3Tx+dbc7bOeFJRd0DaPvLNuX6VUFsoYxp1zzVu01uGJGE5w59OahjlLzNJCQwNONrnTUlLl2LzfLGiZxj0FI24kENke1NxelAyui59ahxfDjzofTpXRdWPFqUZuV7Fh34zuI4xRGRgkMc1XKXi8NLGxz9Kd/pBIIkQY60uZEewn2JgDuzuNI2H6E0wm4wP3qc0xo7ksP3qAmjmQnQn2HuRwN350q569vrUTR3rg4kj446UsKz7sSspA9KtO+xhOLg/eLC5HJpSaarBjtzSk84qkjNseopNubiPJwM0oGMDNNYhZlcngGsak4xWrNacXJ6GnbDdcswOflIxUrWicnJye2aoWTSzXDMr7SAcDFXRb3rNn7Yo/4B/9euNu7ueuo2ikTwWgSWMgtnPIFeb+JxnxFcnJAbsa9CFvfMwzfAY6YSuY1zSftU7MDvmHVulb4esqctTjzCk6tOyOOJIQKF4FbfhpsXEnTJHNZs1lPA5DKQPU1e8OZ+3y56Y4r06lWM4aM8XCQlCqkzpmyRgUgBAwak3YTIGaarknkVxWsfTSG44qvdHBxVln5wBVS9OHHFaR3OXFr92U0LqxKk5PFL50salQ/BGCKsWWx58N0rR8m0J6AmtXJHhpXMZnkkdG34K8jFWLKS4Wa3j8wNh88j3rSdbaN1UqAT0qa2jtvtCMoXO7Awe9ZSkrG0IXkjC+J7f6VorZ/iGR2rq9n+1XK/E5ARpDgfdlOf0rp/OT0NcTaue1BWRb1AjI4zXPyXlmrBWh+cZz8pre1HAYD1rmZVHm7gB9+vDrq8z0qDXKXpLiJbdJGUqmOAFqGK7t3lRFRwz9Cw4p9yCbWE4BqC3G50BAHzdaxsdSaJbm7gScxtGWYDnFTWkiyxgou3mq12qpc4wDk9TVm0HynjFaUfiM6r0NeV2S2Uj5jiqouG2klecVZuMCyjqh1P417MdjjSCS5bdH8venfaDn7tRso2/8Cpc1RTQrXDm5jwq7QD1609rnAB29RVaRys8WBnrUpx8ox1FAInsm/wBNdivG2qmoAi6fI7Vbszm6cf7NVdV/4+MZ7VMti47lZeCjY5qNdwlXd1J4p3nYAx2FRq+595PTmsLHRclx80qk8gUsZ25CelNM2Sxzy/BpolwW20tik01qSuP3YLMMZqo8lurMjpuUipmm3RgSNgDnFZ10cy5zweldmCheTPKzKs6cdBIYbL7UzSW4K7e9PMenkYWLbjriqzdKaDh69P6vE8H+0aiJ1htBcZ2HYR909TVkGzTP2cOp7g9Kou9RhiDSeHjbQuOaVG0mdCxBsAzHAqgzLtDAn71WFDS
aeFTkimvayMgGO4rhqJp2PocPJThdiTSrv79KI2Ty2bmhoX877ucVIkTCB+OpFZ6m/uldsZBycU53QT/MTxQ0MhG3FDWknn716EUah7pYt3V5G+bGFziqs12ACoHJPWrVupVnyOi1jzS4mP1rswyueFmslFqxYiuNrZzVkXmWGazQQeacoxXXyWPGVV3ubH2hCBg8niporWThm+6azrFkM6g9zXQkKUxzx6V87mUpQnZHvZc4zjcdZoqSHHWrwBPSqNu6ht5YKo/jbtWFrPiWQzNZ2DF07yjpRQbcdT0vZOWx1ue1Zcqgyv61k6Dr0rMbW+cAg5UnvWq+WuJWBBVscissXF8t0xqm46NGfe2fmxncAw9Ky9P08Wt40nQNXRuMriqNwpUrUYPETU1G5hPDQb5khCpAIB4zSqqleW5qM5BzTk+UZ8vNe7cJa6DjEAMhqqXq5PWrKnLdAKrXwYNgCnGoo7nNik5QsinayRx3BMjcYqyl7AkcoG4ZGAwqkYjnJWlCMP4K19pSerZ4ip1I9C1NeW8l3AR5rIGUnj061Npd/biVY33hzNxxWeFAGFBzU0EbLLGx/vis5VKdrJmsIzcldDvic/lwaTnJDSnqPpXSZX0rmvief9C0Y9f3pP8AKt3zB61yNxvoexHY0NZmji2M5x2rDlu7FiERSWzmtHxMMwwgjnJrBgCbTlQD6044GFRXZzVcdOnK0TQN/Zy7IhG2UHXNC3dmGG1WyprNlEePlB3etSLgRDOKf9mUzNZnVNB7myc7mjJNOivIFYRgdazC6YqneSlTlSQR3FNZbTWwf2lVb1OzvFP2NeQuBnNUdgBHXHGTWYTdQ6C107ku5CxhvQU7T1ddFm1C6laRsnaMYA4q/qzWiZ1QxytqXZDgYAx83c0uDsyOetZ+jBpxNd3Tfu0UtjNSaR5l/NJIWKxLzj2pPDtFrGplg/61WyDipTwM89O9ZllNJqWs/Z0G2NT371LP5l7rQtIJMCNhu+lS6DsV9dijUsMteNg/8s8/jzTLxA1yzMM8VmapPPDqQsrbj7qMw65J/wDr1oXET2rLDu3nGWJrGvH2aVzXD4lVG7FdQhB4HWmFAuSFFSGQZ+5igyDIA25+tcvNfY7edLchwBHu2/MacpG3OwZp/mNuI2j2oD4Y/LSU1ew/aJK42Pa82CinjuKp3lpK8v7pMgVeWQqSwUZ9TUqlnjPzYyccV14WpyydjzswSqRME20zHbswaT7DcM+BGSa2fsyoWkZ+nXJp1vewvdRxRMrP3ya9GdeUY3seEsPFu1zJTSbxzgR804aTdoPmjz9K1r/XLGxkaOZmEnX5Rmm6bq8F9OI4iSSCRmuFYys3tobxwVLe+omm6Xd8swxEeoPWtYaeQmSwqd7iK1tRJIcBjgCoI9WtZXCBjzVpuep2063s1YrvZqvmuz4wuSagBs2jLLcAqo6Zq9c3EEAYSOoVhypPUVyymN975VQrcLu6it4UHbUiePSdkzoRAsqI6HKtwKn/ALN+UHeBioob20WO3hikQu/GAelTXOr21s/lNIGYdQKzdJplrGJrcYdPBjkIYH5T0rjJAQ7Aj+IjNdxbX8F7FJ5J5UHPauMnVhcSqezZrpw8eVnnY+oqkSvyrcVIT8uaRhzmmnkEdxXX1PKvpYs2Z23Ubk8Z6V1JlEcbOx2jGSfaud0mMG6UsAcc4zW/e25ubOZA23cuBXzWZvmq6H0mURtDU5y7nuNWuzDASIMgccZrQt9Piso2iTax755qVLdLaAKBwq/eHXNVmm+Y46etXTVoo+ghotBt5YJclSoCOFxkVBZ6hJpczQ3IJVyBuParUdwXYL6d6nuLdLmIBgCSRg1FeKa1CTbWpoIyyKGVvlI4qvdZLqMDFWIYDbwJHuFQ3oOEI9a4MMrVjmnsQkEnBFOPGPSnsuUHrTD0r6FnLcYAnUimSYabBIxStux92gxFn4Uk+1cmL5lDQpWe5CV/eYVMj1pqOrFgwAC1M1rIXU/Oo9AKX7L8xIhbjvivEvXZajT7ECSxyElVHFTpCshjGP4s5prWLAb0R1b0xQsy2zLHMdpB5qqaruRNRUktin8TVB0vS2H8Eh/pV3fJ7VT+J7bNBsQO7nH5Cr29fevZgtNTJWaN7VbVbkIG7E1kDSos8Bq2r+Q4Rfc1C8rLBvjUlvQ1tGbWhlKhGTuZkmlQ8D5hSNptsvDMRVmKS/nuAWKrEByvrUWoudr4OMKeK1jNtmVSjGKKgtLMZBkqGfSkuSPKfCetU1OQrBucVraYxMbYatmmkcFNqU7C3VrLNYR2vmDZHzSSW1w2l/YiwEXfFWndoxufJHtUDs00ZCyLKD2JwRWXtGdscOpEVvbGCwktU5jcbSadZWstnZzQoc+b1b0FKiXiLtCYX2alS4MW4NtJ9jR7Qp4ZLqRadYSWErzxSqzkZXPpS2lpcW1696roZX6j2q0S0ipjjg09SyLGuc5FDmYez1KY02R9SN5K+SCHXHfFT3UjSzl2q6hwMmqVztad/pXBjZcySOnCRUW2hwtlkh+Xqe9VotJnhm82WRTHzwOtW0vobZFEgPPpTn1qzYbf3vB9KilBKJpUlK+hXuYliVGUdataZAj7i65qlLcpcR71yVDcZq/pUZk8wq3Ga5r2qG6b5BdWtolstyDByKyHvEghCKpZycACt/WF/wBAYDn5hXM3GnzTMWXjHeuqi/3mhz4i/IZ9wblwzXd2nlDkovWrGkwIbpJghEfY05NO2ANL81XLdfObONo6CvUxE0qVzyqULzK1/YXEt47wopQ+ozUmj6deW955krKFCngD2rdClI1HWlLHbxxXh/XJfCepHCw+IzNZD4jUklQN1ZQyMOAQRWvre9rRDEpZ1rEjnvLoor2zKvQnFenRrQSTkRPCOSdmZl/dy3FwXlc/KNoqoHcqTkj0rV1+yjsUikRTl+ua54zMuAD1PSvcpuMopo+brwnGbRq6S7vrNuqH5gfWtWSVnu5GZuQfSsTQudctwOpYV0slgzFpEU/OxBrnrOMXqa0YVJbC6ZK6XA+bIfIqvLBJJIzDuxFaltZhJI8LyBilIU5GOQazjUj0NZ05RXvHPXDi2cK9WYbJ5iCBhW71Q111W8jAHFdNaRDyYhlgAuetaTnaNznpxTmS6bZw25Yj53HerzEleePaorXZ5ZZSSM45qYDOQPQ18niJOVW59bg4qMNDJu3CwtzWXKxXFat4ALd81lN82M9q9KkrxPShqhYMli2a14sGNeax4MEsT3rSS5jhhTOODjmpqU3JaGdSrCGkmaqkOR7VWvW2sin1oOpWCFVkuI0Y067VG8t0YMp71wUaM4VbtHLKtCTsmQuJyP3Sg8VF/pwHzRAn0rQt+rD0FNkkDYBOK9lyOdys7FNftP8Ay1TaKt2c6wTM7LkYqSVgLbpup9lbJO7KxwMVnLVWYc2gNrBDYW3BHrihdTb5j5Q+mKmaytvLKl1Azjk801LCJnk8uTKrjOKySjsQ3J7ANUZRkxDH0rjNXuTLq0jfeAPauzNvZEc3HTqtcbqwVdXdY/u8V2YWEX
fQ4MXOUbalr4nAt4c01++/+gq1mT0qt8TGP/CJac6/wyAfyp+5vWsajtKyPQou8Eddfgbl29BzVfzgRgg1Nfk71C9DVPBH8FYupFPc3iiXzECEBSCT1zWXqX+qdsfwmr5Bx939ao30Epgdtvy49a0p1ocy1MsRFuLsYq/6lAAORzWjp9wsMb5HPaqEccjxApGxHTpUjwXEbQgoV8w4Feh7WDW54tKE4ybsW5NXYKfkHFQPfwTp89un15on0+RshRuYcH0rY07QbZrUNdL8x7A1k6lM3j7Vu5iC4iJ/1Ix7E1PHeRKp2wJmt06HYg8KQPrVe+0mziti0akP9aFUpFS9qZi6sVyzRgbRgCtK0Zbi2WTGD2FYj6a3lMQ2B9a19MLLaLGykkd6zq1qfQIQnfUuFSRiqV58tyR7VbGQ3K/rVa5hklnLqvGK8jHVE17p6OFp73K7OW24C/L7UjTyBWXbHhv9mpBay56D86abWXrgH8a81V6vQ7fYxe5FHkxiP5cA5q5Y3qW27I6+lQyQPDEZCowPQ1W3hB9wHPvV05y5rsmcEo6F/VNYWS3WMAAs4H61VE7gFQc+uao+TJeXcccScrluTx7Vp/YLjezgL0APNdlOraVzPkTjqQszOm09PanwJtZF3cZqZLaXuo/Om+RJHIG2g4966qmIUoWuL2UFqkaJPGMcU3IIz0AqEPITnZn8ajlllUfLEN3pmvLtqUyeQeaBhutI6eXCCW4FMCSy7SFC46806bzGQqU47Vsp6oi7RzPjJHMFtIW4J44rlMAEAjnGSa7zXdHuNWt7eJfLXy+fmrDPgrUFGRLD05/zmvp8Lj6MKaTZ89isJVlUbSM7RMjWrVdyjc4r0C2j3Rk57mua0/wvd2l5b3JkjYo2TXUQwSxdh1z1rzswxcZv3WdeCoTj8QbQsqgNWBcWsjTSFZypJromWUkYUYz61RbT5GZmOM9QKjB4mCtzmmLoOXwo42/XyLgKzeYw9a17eF5vkMjD5fWpr7w3PdTJIrKp/i5rSttNlhG12HTrXo1cdR5bJnmwwVRSvYdY/wCjWnlrhgGzmrIuAQTjHFLFbSRdQuPrTlgckkquPTNeNKVKUrnu4fmikrGbdN/orEjNZLnLHGOR610M9nJNGUUAfjVA6Hdl+GiA+taxrwWlz1YTsjPj4Q4HINRTTFUYnr7VfuNPks48OwYseCKypItxO48V6uGUZJM+Yzeq5VLJmJcyPPJ8x6niuo0G7klso4pDu2vgGuetmtw84nQ5H3K6fwvA0mmkBVGJOufpVYpQSu9DmwE5Sna5u25PmPuGOMVA8PzcsOTVmOB0J3EEdqbJHIzKQo475ryvrML2ue44SvsK21Lcgt0qsLkQA5bBNXJQ7QlQoJNVXtJJEwdtUq8O4pQlbRGdNLF1MhY5zgdaspqEUUZSKfAfG4+lM/sedjujZR9TSPo8xI5VfX3q1Vo9Wcko1r6IDcW7PuMy5+lc/qJzfNIrDafet06NN/eWq0nh2eVhh0610UsTShszmqUKs2rof8Rxu8C22APlkU5/KqmG9TWj8SE8vwPGODtdeg+lZH2ketc83zyuj06a5YpHoF1naGA9aplj61bvgcLtHaqYJryMQ7SOyK0HE5HNNxkEEZB7UNQpwa51NjcQSMIQFUACormJZ3UOTkfdxU+TS1ftancnkh2KxQRZx2HOaswXhaMAL0prKG60KqqcCqVeSF7JExum/u1Su7gykQleasGmMnOaf1iQeyiQ/ZovLw4PSp4VCRjCEDtSjmnZ4xUSqyZSpRGlvak59aU00DFZuTe41FLYU560hXoPWl3Ec0ocs1IdyMx7gyHpTfs0OOY6nfa3K9aTrTTC42OOKIExphqXk5pQMUv0ouAh5GB1oBA6ikwSadRdgJwOgprDvin0UXFYTOB0xTWyR1p2D1paLhygC2BSHqTntR3pcZFHMw5ERg/KKl4HU0wDBp1LmbDkQ7cKZuBoPpQOKOZoOVBSZyaWlo5mNRQhOaMlRjHFLSN0p8zHZCZxzRuBGcUHpSA0XY+Yz9Vhkl8oRg8mqL6VcLCxxzg9BzW83Jpe1dlLHTpKyOGvg4VXdnL2nhTcnm3NywZjwuK2tKsfsFu0OefMq7ThU1cdUqbl0cDTpNNADgYpM80EZpuOa5Ezq66Dz0prNijFBFO7EICc07IzTR1px5FPmYWQuRSKpDClApe4pqTQmkYnxHIXwduc8eYP6VyH2eX+/XZfEFVfwdJu/hcEVxf22vbotuCMWkerXrEsBnoKp5Iq9dDLj6VTIrysT8ZrDYaeaVRzS0oHNc5YhJpaDiigLiHnilAwKSndqBDCaXpSHk0uKAANRxSjgUnJ7UAN6mlYUDrSsaAsCEYKn1pxTAzkYqIdaXknrmgLDzwuBim4xTsjbyKQ0AJnJoNABoNAAooJoHFB60AKelIeRQelHakAHp1oyaD0peMUwGjJ5qRcCmA06gBp5NLSt0poOaQB3oP3qO9HU0ALTc06m0DSFFBGaRTTs80BYTbTdtO3CkoCwnel7UAZpMHNAgpQTQBTjgUDQmcUgIzSHGaXAHNAxeKQkUuOKQrQAnWlHWkHFKDzTJsKCc0c/lR0p3BzT6iaML4iJJJ4LkKfwurH864banvXe/EFiPA023puXP51wO5a92k/cRzy3PZLpCcEVSq9dEqpHvVIivJxP8Rm8NgClhxQInzQCV6HFOEretc5Q0hl60gpzuxFRigBwBJ4p6wueaYCR0NP3tj71A7A0ZTkimU7e396mmgLDlRmGRSmJ6QMV6HFHmN/eoCwjIU5IpmSe1KzOf4qbjFAWJFjZhkCl8hqQOwHBxR5jetAWAoVQ5pg5NOLlhinCgLCiPIoaI44603cRwDSiRgeuaAsNZMD5qTGaC240uaAsPEWRS+Saj3N2NKM92pBYUrt60373Q0lGKAsSJESPvU8wnHWoNxU4p284phYOhwaaV5wKByaTPNILEnkGgQlTk0xnIPWjecdaAsDjFN79aU80EYoGSeVxnNJ5RIpm5u1GWoANhU80gQk0vLDFJQBIIGJyDihoiO9NUsOlDOc4NArCA7TzSD5+lKRkUAYFAx4gOOaGi4qPLHo2KPm7tQAYPSnKu7im/8AAqUE0ASC39Tigw7BnOaj3H1ppOaYherYqWJA26ohTxxihCZkfEFQPAlzxnDKc15tvNeneOAG8C3hPYivLdy+te5RfuI5pbnuN2Ohz3qoTVu7PAP1qmSK8vE/xGbw2EPSm4p3akHWucoWkxS0UAJ3p3agDNIQQaCgpOtLQATQAh60uBS02gBcCm4p1BwKQDTQOtO60nSmAgODTyaaAc0pOaAEJyeaBjNLsJpMYNILhikp1I1AXAAGjAHIpFyKXBNFwuFGT6U4DHWjcfSgLjTknpSgUuSaTnPPSgLiHrTcZNONCqTzQFxCBmjAxQetGaAuMVjuxThnvSheaUe9AXEBzS89qTgUoNAXE2NnrRinUlFwuA4pGXvThmkPp3ouFwIIWgDFKSSMUmaLhcCoxmmgZNONAFFwuNIFOpCDRnNFw
uIeRmmgU/BIxSbSKCRQKXpigUf0poTKPjFRL4Iv1PGBmvHPNb1FeyeLcN4I1D2UV4p5qele1R+BGEtz6EvARtA96pmr93zj61T2gV5uITcjWD0G9qTvQVwetH41hysu4uBRSZ9qAKOVhcCCelAOOD1pwwOaXIJzRysLjaTdin4z6UwLg9qOVhcXtSdqGXLdaXBxgHmjlYXA9KTOaCcDBNHTvU8jC4vSjrSYDHJNLuAOAaORhcTOKBS9e9B+tNRYXF3YOM0jdetB570oHPWnyMd0IPrTT1pWUnvSBRR7NhdCjpR0oIwKOopWkugXQZpaQgY6U7Jo5ZPoF0NwTRg0p69aQ0/ZsLoKA2KbnHQU7n0o9mwuhp60DrTqQqexo9mwuhaKAG9aMH0o9mwuhD0oFKAw7UuCeMUcjC6AUnenYyMYpoB7A0rS7BdA3FNByaeQ3cU3Bz0otLsF0OxSYowfSk2t6Gi0uwXQuKOlN+bOKfsboOaLS7BdDCaBT9j+lGw+hotLsF0NbPQUgyODT9pB6UFSTjFO0uwrobmgcZpxjOOlIqEDpT5JPoS2uhV8TgN4L1Lj/liT+QNeIbV/u17n4iB/4Q/Ux/0xb+VeDYb1r2sOvcVzFvU+lHhEmdxx3qM2qe9Dajp4TJvbc8f89BTf7RsCP+P2D/vsVDopvYnmaFa1TPek+zIOxo/tPT/+f2D/AL7FNOq6Z31C2H/bQUewXYPaMf8AZ1PQUgtlpn9saX21C1/7+ilOq6YeTqVsP+2go9hHsHtGKYUJ46UG3UimtqulY/5CVr/38FM/tvRlXnVbQH/rqKPYLsPmZMLZB1FKbWPtVca9op66vaf9/B/jSjxBovT+1rP/AL+ij2EewczLH2VAOKVbZM5qt/b2j4/5Ctn/AN/RSrr+j5/5Cdr/AN/BS9hHsPmZMbWPPIo+zx+lQPr+jJy2qWmPaUVGviLRC2DrFn/39H+NH1ePYXMy19nj3dKX7NHngVSbxNoGcDWbPj/pqP8AGkHinQAOdZsx/wBtB/jR9Xj2Fzs0BboKPsyntWf/AMJV4e/6Dll/39H+NA8VeHx/zF7T/v6KpYePYOds0Ps6f3aTyVz92qA8U+Hyf+QzZj/tqtH/AAlnh0f8xqz/AO/o/wAafsI9h3ZoeSnp+lKIk9P0rMPi/wAOL11m0/7+D/Gmf8Jl4ZJx/bVr/wB9j/Gj2EewcxrGKM9QPypDFHjgCsj/AITXwxkj+27XP++P8aQeNvDLZxrEH/fQp+xQXZseUnoPyoESY6D8qxD428MIfm1m3H/Ah/jSSePPCyc/2xCf90g0exi+gczNvy0/uj8qPLQj7orAHxC8Ktx/aef+AUH4h+FIxltS4/3KXsI9g5joFiTPK0vkrjOBXNf8LI8KZ41En/gFA+I/hbZ/x/n/AL4o9hHsF2dKIVz90Ggxp02CuZ/4WP4XzxeM30SmH4leFVfH2qTP/XM0ewj2C7Op8tP7oo8tc9OK5YfEnwwG5u5P+/ZqU/EbwwE3fa3/AO/Zo9hHsF2dKI1/uilMa4+6K5Z/iV4XQDNzK3uIzTB8TvCmf9fP/wB+jR7CPYLs6sQDPSgRD+6K5Y/EvwwBkTzY/wCuZpv/AAs7wsOs8/8A36NP2SDmOrMYB+7SCJSfu1yZ+KXhbP8Arrj/AL8mmn4p+Fh/y0uT/wBsiKPZILs6/wAkf3f0oMa/3f0rkF+Kfhduj3P/AH7NIfil4ZHU3P8A3waPZIOZnXmNNuSooSMHnaK4/wD4Wn4ZbobjH/XImk/4Wl4cBwPtJH/XI0eyQcx2nlj0/SgxD0/SuLb4reGl6i6/CM0p+KnhvH3bv/v0aPYonmZ2JjGen6Unlgn7v6VyCfFPw2wPy3Wf+uZpq/FLw6zY2XZH/XM0/ZId2dmYhnGP0o8pfT9K4x/in4aU5Md3/wB+z/hR/wALW8Mn7sV5/wB+zSdNILs3/FCH/hFtRUDGYW/lXgnlj+9XqGrfEfQdQ0e9s4orjzJYiFyh6kV5j5Mn/PI1otEB/9k=',\n", + " 'file-path': 'D:\\\\DEV PATEL\\\\2025\\\\scratch_VLM\\\\scratch_agent\\\\blocks\\\\out_json\\\\test\\\\figure-1-2.jpg',\n", + " 'pdf-id': '9f36b3cde65f4356b896f17bac0a83c5',\n", + " 'image-uuid': '8bd5f71b-0905-46ea-a6c3-8edf36240fc0'},\n", + " 'Sprite 3': {'base64': 
'/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCAE6AbIDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwCTSvBel3mnWt1NLdedIgLMJmx/OrR8CaZk/v7s/SZv8a1tBOfD9kR/zyH8zWlSuWcq3gPTD/y3vB/22P8AjTP+ED0sH/j6vvwmaurbtTaLjOYHgTS/+fu//wC/5p3/AAg2mD/l7vvxmNdKKVjxTuKxzDeBtOI/4+b38JjSf8IJpmB/pd//AN/jXTg0ZouFjmP+EF00/wDL1e/9/jR/wgen9r2+H/bY10+aTPNAWOZ/4QLT/wDn+v8A/v8AGnDwLp4BH22//wC/xrpiaTNArHNjwNpyj/j8vv8Av8aZ/wAIPpxOftd9/wB/zXT54puO9A7HNnwPp2P+Pq9/7/Gm/wDCC6Yetze/9/jXT5ozQBzP/CCaV/z8Xv8A3+P+NNPgXSs4Mt2f+2zf411GaKBnMf8ACBaP/wA9Lr/v83+NN/4QLR8/627/AO/rf411NFK4HMDwLpK97k/9tm/xpf8AhBdFPVJ/+/zf410xbFNDc0XA5h/AWiHHyz/hM3+NO/4QPRMY2TH6zN/jXSE5NL2pgc3/AMILoYXb5T/9/W/xpP8AhA9DAx5DH/tq3+NdJS0Ac1/wgehEc27f9/W/xpB4E0AA7bZwf+urf4101NNAHNnwLoYP/HvJ/wB/D/jSnwLof8ULY/66N/jXSZop3A5w+BdAK4NvIfpKw/rUf/CAaCDxDN9POb/GunpDSA55fA+hqMeRKP8Ats3+NNPgbRP+ecn/AH8b/GukzRQBzS+CNFU8Ryf9/G/xp3/CE6NkkRyj3Erf410RGaM4GKAOb/4QnSQc/wCk8/8ATU0o8EaV2kuR/wBtW/xroqUHigDnT4I0vtcXf/f1qaPBVh/z83gH/XY10uaKCWc0fBVkOl3ff9/jTv8AhELUIAL+/BH/AE0ro800nmgEc5/wiEP/AEE78f8AA6P+EQhHP9q6h/33/wDWro85ooKObTwkoJI1W+/77H+FA8Jg5P8AbF+M/wC2P8K6I0A0Ac//AMIsFX/kMah/30P8KB4YAGRrOoZ+q/4V0Gc03vQBhDw3JnjWb/8ANf8ACg+GbjJI1u949dv+Fbven5+Uj2oEcBrkWo6De2TR6pczRyybXD46ce1dyudoJycqOprjviESr6Wckfvu34V2SDKIw5+RabVguLRTqaetIYUUUUAQeG2/4pywP/THn8zWt2rI8MgHwvp//XL+prXPSpEhp5pCOKWg9KBjaDyKKKAAAYpKcKQimAY4pMUvakxTuAhPSlxQRmlouAFaToKCTR1FFwEA5oxSZ5p1K4DcU4dKKKGAUUUUgEYU0ACnmm0AJil7UUuKaAbiin4puOaYCUYoooATFLSZpaACkIpaKAG04dKTFKKACkwKUnFFACYoxS0E4pXAMUHpRSZyKYhKTFLRQCE6UjEg06mkZoGFJS0h4oAdgUmBRnNJQAYFLR2pKBM434hsEtLBz1E3X8q6+Ft1rAw7rz+Vcj8RkzpVr7yED26V1dpzZQf9cwaqW1xE1NPWnU09akoKKKKAKvhU58L6f/1z/qa2e1YnhEE+FbDn/ln/AFNbZ4FSA09aGoooAbRTqa1ACilPSkHSlPSgBtFFFABRRQaAEpRRRQAzvTh0pTSUAFFFFABRRRQAGm06igBBS0hpKAHU3uadTT1poBD1opaKYDe9LS0UAJRS0Z9qAEopaaRzQAGlFIBS0AFIadTWqQDtQKUdKKaAbRTqaetMAoPSiigBtI3Sn00jmgBFpaMYooAO1KBSUH7tAmch8RVZtFhYD7so/pXS6fk6bbNnrGK574hEjw2rDs9dBphDaTaEf88xVS+ARapD1p1IelSUNooooAo+D8/8IrYf9c/6mt2sTwf/AMipp59Ux+tbZ61ICGkpSM0YoASjGaXFITigAopM0ZoACKBSmkAoACBSd6dSY5oATvRS45pDQAUYoooAMUhFOxTSDmgABzS4pAMUtACGgdKDzQKADFJilJxRQAUmKXFFACYoxSk4pM0AJRS4oxQAmcUbval25pMCmgCilxRihgJRijGKKQC4pCOaXNHU0ANopTSU0AhpvNOPNGKYDeaOadigigBKKKKkAxSYpaKAExSH0p1IRTQmcv8AEDC+F3LDIDjFa+hOG0GybHWIVmePQP8AhFpd3TetaehDboVmOoEYFWthGhSHpSnik6ipKG0Uu2igCl4N58KWHsp/nW3WD4Ibd4Ssc9dp/nXQdqkBtFFFABSYpaBQA3FFHeg9KACihelFABRRRQAU09adRQA2ilNKOlABTT1p1NPWgAooooASilpKAENKKMZpQOKACkpaSgBDSU6igAooooABSGlop3AKKKKAENJThSHrSASlHWlooAaetJTjSU0AuKbS0lMAoPSikNACUUUuKkBKKUClPSgBtFFKOlAjm/HiF/CtxjsQau+GiW8N2DHvGKi8YIX8MXeOgHNO8KFm8NWPPATFWmFjXNFONNxUtjCijFFLmAzfBXHhSx/3T/Ot+sDwVz4Ssj/sn+ddBtoAaaKcRimEUALRRRQAmBRgUo5ooATGKSnUmKAEooPFB4oAKKBzS4oAaaM0pFJigBC3NHWlK5NGKAEopcUEYoASilxxRigBKTJxS9KM0ANyfWlHSinAZoATFGKO9FABijFFFACUUtJigAooxS0AJRilNGKAEooooAQ0lKRmjFNAJRS4pKdwCkIpaD0pXAbilpM0uaQBQelFGKAG0ucCkpcUCMfxUd3he/x2jJ
<base64 image data truncated>',\n", + "            'file-path': 'D:\\\\DEV PATEL\\\\2025\\\\scratch_VLM\\\\scratch_agent\\\\blocks\\\\out_json\\\\test\\\\figure-1-3.jpg',\n", + "            'pdf-id': '9f36b3cde65f4356b896f17bac0a83c5',\n", + "            'image-uuid': 'efd4abeb-178f-4ba5-a912-6795d8c5124b'},\n", + "           'Sprite 4': {'base64': '<base64 image data truncated>
xSgVEScdaQMc9TT5QJDnPQ0oJAxiq247jyacCc9TRyhexNyOeaMZqMk+tNyfU0coufyJsexpQPY1ECcdaAT60+UOfyJqSoiT600HmjlHe5YPSmqu45JNMpMmmohexMVPtigAVDk+tNzzVWFz+RYoqNTQ1HKLnJOvSkpg6UopqAc48gAZNT6dC097GR0Vqr9xXS6MihMhRn6V2YSknVVznrz9w0gSFxRQP4qcK+pWlkeS97gKG60tIetMQlO7U0dadQAgpwplOXoaAHGkxSjpRQAgpc0UUwF4oyKaelJQA7n1oo7UUAJ3paTvS0AFFFFAH/2Q==',\n", + " 'file-path': 'D:\\\\DEV PATEL\\\\2025\\\\scratch_VLM\\\\scratch_agent\\\\blocks\\\\out_json\\\\test\\\\figure-1-4.jpg',\n", + " 'pdf-id': '9f36b3cde65f4356b896f17bac0a83c5',\n", + " 'image-uuid': '674b445e-406b-48fe-92e3-3ff40fe262af'}}" + ] + }, + "execution_count": 38, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "output_path" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c5c57bdf", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import json\n", + "import numpy as np\n", + "from io import BytesIO\n", + "from PIL import Image\n", + "from pathlib import Path\n", + "import shutil\n", + "\n", + "def hybrid_similarity_matching(sprite_images_bytes, sprite_ids, \n", + " min_similarity=None, top_k=5, method_weights=(0.5, 0.3, 0.2)):\n", + " import imagehash as phash\n", + " from image_match.goldberg import ImageSignature\n", + " import math\n", + " \n", + " backdrop_base_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\"\n", + " sprite_base_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\"\n", + " code_blocks_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\"\n", + " out_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\"\n", + " \n", + " \n", + " # Load reference data [change prod BLOCKS_DIR]\n", + " embeddings_path = os.path.join(out_path, \"hybrid_embeddings.json\")\n", + " hash_path = os.path.join(out_path, \"phash_data.json\") \n", + " signature_path = os.path.join(out_path, \"signature_data.json\")\n", + " # Load embeddings\n", + " with open(embeddings_path, \"r\", encoding=\"utf-8\") as f:\n", + " embedding_json = json.load(f)\n", + " \n", + " # Load phash data (if exists)\n", + " hash_dict = {}\n", + " if os.path.exists(hash_path):\n", + " with open(hash_path, \"r\", encoding=\"utf-8\") as f:\n", + " hash_data = json.load(f)\n", + " for path, hash_str in hash_data.items():\n", + " try:\n", + " hash_dict[path] = phash.hex_to_hash(hash_str)\n", + " except Exception:\n", + " pass\n", + " \n", + " # Load signature data (if exists)\n", + " signature_dict = {}\n", + " gis = ImageSignature()\n", + " if os.path.exists(signature_path):\n", + " with open(signature_path, \"r\", encoding=\"utf-8\") as f:\n", + " sig_data = json.load(f)\n", + " for path, sig_list in sig_data.items():\n", + " try:\n", + " signature_dict[path] = np.array(sig_list)\n", + " except Exception:\n", + " pass\n", + " \n", + " # Parse embeddings\n", + " paths_list = []\n", + " embeddings_list = []\n", + " if isinstance(embedding_json, dict):\n", + " for p, emb in embedding_json.items():\n", + " if isinstance(emb, dict):\n", + " maybe_emb = emb.get(\"embedding\") or emb.get(\"embeddings\") or emb.get(\"emb\")\n", + " if maybe_emb is None:\n", + " continue\n", + " arr = np.asarray(maybe_emb, dtype=np.float32)\n", + " elif isinstance(emb, list):\n", + " arr = np.asarray(emb, dtype=np.float32)\n", + " else:\n", + " continue\n", + " paths_list.append(os.path.normpath(str(p)))\n", + " embeddings_list.append(arr)\n", + " elif isinstance(embedding_json, list):\n", + " for item in embedding_json:\n", + " if not 
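`hybrid_similarity_matching` expects three pre-computed side-car files in the `out_json` folder: `hybrid_embeddings.json` (reference path → embedding vector), `phash_data.json` (path → perceptual-hash hex string, re-read with `imagehash.hex_to_hash`) and `signature_data.json` (path → Goldberg image-signature list). As a rough sketch of how those reference indexes could be built offline — the `embed_image` callable is a placeholder for whatever embedding helper the notebook actually uses (e.g. its DINOv2/CLIP wrappers), and `imagehash`/`image_match` are assumed installed:

```python
import os
import json
import numpy as np
from PIL import Image
import imagehash
from image_match.goldberg import ImageSignature

def build_reference_indexes(ref_dir, out_dir, embed_image):
    """Walk ref_dir, compute an embedding, a perceptual hash and a Goldberg
    signature per reference image, and write the three JSON files that
    hybrid_similarity_matching() later loads from out_json."""
    gis = ImageSignature()
    embeddings, hashes, signatures = {}, {}, {}
    for root, _, files in os.walk(ref_dir):
        for name in files:
            if not name.lower().endswith((".png", ".jpg", ".jpeg")):
                continue
            path = os.path.normpath(os.path.join(root, name))
            with Image.open(path) as im:
                rgb = im.convert("RGB")
                # embed_image is a stand-in for the notebook's embedding helper
                embeddings[path] = np.asarray(embed_image(rgb), dtype=np.float32).tolist()
                hashes[path] = str(imagehash.phash(rgb))  # hex string; re-read with hex_to_hash()
            signatures[path] = gis.generate_signature(path).tolist()
    os.makedirs(out_dir, exist_ok=True)
    for fname, payload in (("hybrid_embeddings.json", embeddings),
                           ("phash_data.json", hashes),
                           ("signature_data.json", signatures)):
        with open(os.path.join(out_dir, fname), "w", encoding="utf-8") as f:
            json.dump(payload, f)
```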
isinstance(item, dict):\n", + " continue\n", + " p = item.get(\"path\") or item.get(\"image_path\") or item.get(\"file\") or item.get(\"filename\") or item.get(\"img_path\")\n", + " emb = item.get(\"embeddings\") or item.get(\"embedding\") or item.get(\"features\") or item.get(\"vector\") or item.get(\"emb\")\n", + " if p is None or emb is None:\n", + " continue\n", + " paths_list.append(os.path.normpath(str(p)))\n", + " embeddings_list.append(np.asarray(emb, dtype=np.float32))\n", + " \n", + " if len(paths_list) == 0:\n", + " raise RuntimeError(\"No reference images/embeddings found\")\n", + " ref_matrix = np.vstack(embeddings_list).astype(np.float32) # shape: (num_refs, emb_dim)\n", + " \n", + " # Batch: Get all sprite embeddings, phash, sigs first\n", + " sprite_emb_list = []\n", + " sprite_phash_list = []\n", + " sprite_sig_list = []\n", + " for i, sprite_bytes in enumerate(sprite_images_bytes):\n", + " # sprite_pil = Image.open(sprite_bytes)\n", + " # enhanced_sprite = process_image_cv2_from_pil(sprite_pil, scale=2) or sprite_pil\n", + " # # sprite_emb = get_dinov2_embedding_from_pil(preprocess_for_model(enhanced_sprite)) or np.zeros(ref_matrix.shape[1])\n", + " # # sprite_emb_list.append(sprite_emb)\n", + " # sprite_emb = get_dinov2_embedding_from_pil(preprocess_for_model(enhanced_sprite))\n", + " # sprite_emb = sprite_emb if sprite_emb is not None else np.zeros(ref_matrix.shape[1])\n", + " # sprite_emb_list.append(sprite_emb)\n", + " # # Perceptual hash\n", + " # sprite_hash_arr = preprocess_for_hash(enhanced_sprite)\n", + " # sprite_phash = None\n", + " # if sprite_hash_arr is not None:\n", + " # try: sprite_phash = phash.encode_image(image_array=sprite_hash_arr)\n", + " # except: pass\n", + " # sprite_phash_list.append(sprite_phash)\n", + " # # Signature\n", + " # sprite_sig = None\n", + " # try:\n", + " # temp_path = f\"temp_sprite_{i}.png\"\n", + " # enhanced_sprite.save(temp_path, format=\"PNG\")\n", + " # sprite_sig = ImageSignature().generate_signature(temp_path)\n", + " # os.remove(temp_path)\n", + " # except: pass\n", + " # sprite_sig_list.append(sprite_sig)\n", + "\n", + " # Embedding similarity: batch matrix multiply\n", + " sprite_emb_array = np.stack(sprite_emb_list)\n", + " sprite_emb_array_norm = sprite_emb_array / np.linalg.norm(sprite_emb_array, axis=1, keepdims=True)\n", + " ref_matrix_norm = ref_matrix / np.linalg.norm(ref_matrix, axis=1, keepdims=True)\n", + " embedding_similarities = np.matmul(sprite_emb_array_norm, ref_matrix_norm.T)\n", + " embedding_similarities = np.clip(embedding_similarities, 0, 1)\n", + "\n", + " # For each sprite, use the advanced re-ranker\n", + " per_sprite_final_indices = []\n", + " per_sprite_final_scores = []\n", + " per_sprite_rerank_debug = []\n", + "\n", + " for i, sprite_id in enumerate(sprite_ids):\n", + " # Build results for this sprite\n", + " embedding_results = [(paths_list[j], float(embedding_similarities[i, j])) for j in range(len(paths_list))]\n", + " phash_results = []\n", + " imgmatch_results = []\n", + " sprite_phash = sprite_phash_list[i]\n", + " sprite_sig = sprite_sig_list[i]\n", + " for j, ref_path in enumerate(paths_list):\n", + " # Phash\n", + " ph_sim = 0.0\n", + " hd = None\n", + " if sprite_phash is not None and ref_path in hash_dict:\n", + " try:\n", + " ref_hash = hash_dict[ref_path]\n", + " hd = phash.hamming_distance(sprite_phash, ref_hash)\n", + " ph_sim = max(0.0, 1.0 - (hd / 64.0))\n", + " except: pass\n", + " phash_results.append((ref_path, hd if hd is not None else 1000, ph_sim))\n", + " # 
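As committed, the per-sprite feature-extraction loop inside `hybrid_similarity_matching` is entirely commented out, so `sprite_emb_list`, `sprite_phash_list` and `sprite_sig_list` stay empty and the later `np.stack(sprite_emb_list)` has nothing to stack. A minimal sketch of what that loop would need to populate is below; `embed_fn` stands in for the notebook's helpers (`preprocess_for_model` + `get_dinov2_embedding_from_pil`) and the CV2 enhancement step is omitted. Note that with `imagehash` objects the Hamming distance is obtained as `h1 - h2`; the module itself exposes no `hamming_distance` function, so the comparison code above presumably assumes a different hashing helper.

```python
import os
import numpy as np
import imagehash
from PIL import Image
from image_match.goldberg import ImageSignature

def extract_sprite_features(sprite_images_bytes, embed_fn, emb_dim):
    """Per-sprite embedding, perceptual hash and Goldberg signature.
    embed_fn is a placeholder for the notebook's embedding pipeline."""
    gis = ImageSignature()
    emb_list, hash_list, sig_list = [], [], []
    for i, sprite_bytes in enumerate(sprite_images_bytes):
        sprite_pil = Image.open(sprite_bytes).convert("RGB")
        emb = embed_fn(sprite_pil)
        emb_list.append(np.asarray(emb, dtype=np.float32)
                        if emb is not None else np.zeros(emb_dim, dtype=np.float32))
        # with imagehash, Hamming distance between two hashes is simply (h1 - h2)
        hash_list.append(imagehash.phash(sprite_pil))
        tmp = f"temp_sprite_{i}.png"
        sprite_pil.save(tmp, format="PNG")
        try:
            sig_list.append(gis.generate_signature(tmp))
        finally:
            os.remove(tmp)
    return emb_list, hash_list, sig_list
```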
Signature\n", + " sig_sim = 0.0\n", + " dist = None\n", + " if sprite_sig is not None and ref_path in signature_dict:\n", + " try:\n", + " ref_sig = signature_dict[ref_path]\n", + " dist = ImageSignature().normalized_distance(ref_sig, sprite_sig)\n", + " sig_sim = max(0.0, 1.0 - dist)\n", + " except: pass\n", + " imgmatch_results.append((ref_path, dist if dist is not None else 1000, sig_sim))\n", + "\n", + " # Call the advanced re-ranker\n", + " rerank_result = choose_top_candidates(embedding_results, phash_results, imgmatch_results,\n", + " top_k=top_k, method_weights=method_weights, verbose=True)\n", + " per_sprite_rerank_debug.append(rerank_result)\n", + "\n", + " # Selection logic: prefer consensus, else weighted top-1\n", + " final = None\n", + " if len(rerank_result[\"consensus_topk\"]) > 0:\n", + " consensus = rerank_result[\"consensus_topk\"]\n", + " best = max(consensus, key=lambda p: rerank_result[\"weighted_scores_full\"].get(p, 0.0))\n", + " final = best\n", + " else:\n", + " final = rerank_result[\"weighted_topk\"][0][0] if rerank_result[\"weighted_topk\"] else None\n", + "\n", + " # Store index and score for downstream use\n", + " if final is not None and final in paths_list:\n", + " idx = paths_list.index(final)\n", + " score = rerank_result[\"weighted_scores_full\"].get(final, 0.0)\n", + " per_sprite_final_indices.append([idx])\n", + " per_sprite_final_scores.append([score])\n", + " print(f\"Sprite '{sprite_id}' FINAL selected: {final} (index {idx}) score={score:.4f}\")\n", + " else:\n", + " per_sprite_final_indices.append([])\n", + " per_sprite_final_scores.append([])\n", + "\n", + " return per_sprite_final_indices, per_sprite_final_scores, paths_list#, per_sprite_rerank_debug\n", + "\n", + "def similarity_matching(sprites_data: dict, project_folder: str, top_k: int = 1, min_similarity: float = None) -> str:\n", + " print(\"🔍 Running similarity matching…\")\n", + " os.makedirs(project_folder, exist_ok=True)\n", + "\n", + " # backdrop_base_path = os.path.normpath(str(BACKDROP_DIR))\n", + " # sprite_base_path = os.path.normpath(str(SPRITE_DIR))\n", + " # code_blocks_path = os.path.normpath(str(CODE_BLOCKS_DIR))\n", + " backdrop_base_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\"\n", + " sprite_base_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\"\n", + " code_blocks_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\"\n", + " out_path = r\"D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\"\n", + "\n", + " project_json_path = os.path.join(project_folder, \"project.json\")\n", + "\n", + " # -------------------------\n", + " # Build sprite images list (BytesIO) from sprites_data\n", + " # -------------------------\n", + " sprite_ids, sprite_base64 = [], []\n", + " for sid, sprite in sprites_data.items():\n", + " sprite_ids.append(sid)\n", + " sprite_base64.append(sprite[\"base64\"])\n", + " \n", + " print(\"sprite_ids\",sprite_ids)\n", + " print(\"sprite_base64\",sprite_base64)\n", + "\n", + " # sprite_images_bytes = []\n", + " # for b64 in sprite_base64:\n", + " # img = Image.open(BytesIO(base64.b64decode(b64.split(\",\")[-1]))).convert(\"RGB\")\n", + " # buffer = BytesIO()\n", + " # img.save(buffer, format=\"JPEG\")\n", + " # buffer.seek(0)\n", + " # sprite_images_bytes.append(buffer) \n", + " \n", + " \n", + " # print(\"sprite_base64\",sprite_images_bytes) \n", + "\n", + "\n", + " # Use hybrid matching system\n", + " per_sprite_matched_indices, per_sprite_scores, 
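`choose_top_candidates` is defined elsewhere in the notebook; the caller here only relies on it returning `weighted_topk`, `weighted_scores_full` and `consensus_topk`. A minimal sketch of that kind of weighted-fusion re-ranker, using the same `method_weights=(0.5, 0.3, 0.2)` split across embedding, perceptual-hash and signature similarity (this is an illustration, not the notebook's actual implementation):

```python
from typing import Dict, List, Tuple

def weighted_fusion_rerank(embedding_results: List[Tuple[str, float]],
                           phash_results: List[Tuple[str, int, float]],
                           imgmatch_results: List[Tuple[str, float, float]],
                           top_k: int = 5,
                           method_weights: Tuple[float, float, float] = (0.5, 0.3, 0.2),
                           verbose: bool = False) -> Dict:
    """Combine the three similarity lists into one weighted score per reference
    path, and also report the candidates all three methods rank in their top-k."""
    w_emb, w_ph, w_sig = method_weights
    emb = {p: s for p, s in embedding_results}
    ph = {p: s for p, _, s in phash_results}
    sig = {p: s for p, _, s in imgmatch_results}
    weighted = {p: w_emb * emb.get(p, 0.0) + w_ph * ph.get(p, 0.0) + w_sig * sig.get(p, 0.0)
                for p in emb}

    def topk_paths(scores):
        return {p for p, _ in sorted(scores.items(), key=lambda kv: kv[1], reverse=True)[:top_k]}

    consensus = topk_paths(emb) & topk_paths(ph) & topk_paths(sig)
    weighted_topk = sorted(weighted.items(), key=lambda kv: kv[1], reverse=True)[:top_k]
    if verbose:
        print(f"weighted top-{top_k}: {weighted_topk}")
    return {"weighted_topk": weighted_topk,
            "weighted_scores_full": weighted,
            "consensus_topk": sorted(consensus)}
```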
paths_list = hybrid_similarity_matching(\n", + " sprite_base64, sprite_ids, min_similarity, top_k, method_weights=(0.5, 0.3, 0.2)\n", + " )\n", + "\n", + " # =========================================\n", + " # Copy matched sprite assets + collect data\n", + " # =========================================\n", + " project_data = []\n", + " backdrop_data = []\n", + " copied_sprite_folders = set()\n", + " copied_backdrop_folders = set()\n", + "\n", + " matched_indices = sorted({idx for lst in per_sprite_matched_indices for idx in lst})\n", + " print(\"matched_indices------------------>\",matched_indices)\n", + "\n", + " sprite_base_p = Path(sprite_base_path).resolve(strict=False)\n", + " backdrop_base_p = Path(backdrop_base_path).resolve(strict=False)\n", + " project_folder_p = Path(project_folder)\n", + " project_folder_p.mkdir(parents=True, exist_ok=True)\n", + " \n", + " def display_like_windows_no_lead(p: Path) -> str:\n", + " s = p.as_posix()\n", + " if s.startswith(\"/\"):\n", + " s = s[1:]\n", + " return s.replace(\"/\", \"\\\\\")\n", + " \n", + " def is_subpath(child: Path, parent: Path) -> bool:\n", + " try:\n", + " child.relative_to(parent)\n", + " return True\n", + " except Exception:\n", + " return False\n", + "\n", + " # Copy assets and build project data (unchanged from your version)\n", + " for matched_idx in matched_indices:\n", + " if not (0 <= matched_idx < len(paths_list)):\n", + " print(f\" ⚠ matched_idx {matched_idx} out of range, skipping\")\n", + " continue\n", + " matched_image_path = paths_list[matched_idx]\n", + " matched_path_p = Path(matched_image_path).resolve(strict=False)\n", + " matched_folder_p = matched_path_p.parent\n", + " matched_filename = matched_path_p.name\n", + " matched_folder_display = display_like_windows_no_lead(matched_folder_p)\n", + " print(f\"Processing matched image: {matched_image_path}\")\n", + " print(f\" - Folder: {matched_folder_display}\")\n", + "\n", + " folder_key = matched_folder_p.as_posix()\n", + "\n", + " # SPRITE\n", + " if is_subpath(matched_folder_p, sprite_base_p) and folder_key not in copied_sprite_folders:\n", + " print(f\"Processing SPRITE folder: {matched_folder_display}\")\n", + " copied_sprite_folders.add(folder_key)\n", + " sprite_json_path = matched_folder_p / \"sprite.json\"\n", + " if sprite_json_path.exists() and sprite_json_path.is_file():\n", + " try:\n", + " with sprite_json_path.open(\"r\", encoding=\"utf-8\") as f:\n", + " sprite_info = json.load(f)\n", + " project_data.append(sprite_info)\n", + " print(f\" ✓ Successfully read sprite.json from {matched_folder_display}\")\n", + " except Exception as e:\n", + " print(f\" ✗ Failed to read sprite.json in {matched_folder_display}: {repr(e)}\")\n", + " else:\n", + " print(f\" ⚠ No sprite.json in {matched_folder_display}\")\n", + " try:\n", + " sprite_files = list(matched_folder_p.iterdir())\n", + " except Exception as e:\n", + " sprite_files = []\n", + " print(f\" ✗ Failed to list files in {matched_folder_display}: {repr(e)}\")\n", + " print(f\" Files in sprite folder: {[p.name for p in sprite_files]}\")\n", + " for p in sprite_files:\n", + " fname = p.name\n", + " if fname in (matched_filename, \"sprite.json\"):\n", + " continue\n", + " if p.is_file():\n", + " dst = project_folder_p / fname\n", + " try:\n", + " shutil.copy2(str(p), str(dst))\n", + " print(f\" ✓ Copied sprite asset: {p} -> {dst}\")\n", + " except Exception as e:\n", + " print(f\" ✗ Failed to copy sprite asset {p}: {repr(e)}\")\n", + "\n", + " # BACKDROP\n", + " if is_subpath(matched_folder_p, 
backdrop_base_p) and folder_key not in copied_backdrop_folders:\n", + " print(f\"Processing BACKDROP folder: {matched_folder_display}\")\n", + " copied_backdrop_folders.add(folder_key)\n", + " backdrop_src = matched_folder_p / matched_filename\n", + " backdrop_dst = project_folder_p / matched_filename\n", + " if backdrop_src.exists() and backdrop_src.is_file():\n", + " try:\n", + " shutil.copy2(str(backdrop_src), str(backdrop_dst))\n", + " print(f\" ✓ Copied matched backdrop image: {backdrop_src} -> {backdrop_dst}\")\n", + " except Exception as e:\n", + " print(f\" ✗ Failed to copy matched backdrop image {backdrop_src}: {repr(e)}\")\n", + " else:\n", + " print(f\" ⚠ Matched backdrop source not found: {backdrop_src}\")\n", + " try:\n", + " backdrop_files = list(matched_folder_p.iterdir())\n", + " except Exception as e:\n", + " backdrop_files = []\n", + " print(f\" ✗ Failed to list files in {matched_folder_display}: {repr(e)}\")\n", + " print(f\" Files in backdrop folder: {[p.name for p in backdrop_files]}\")\n", + " for p in backdrop_files:\n", + " fname = p.name\n", + " if fname in (matched_filename, \"project.json\"):\n", + " continue\n", + " if p.is_file():\n", + " dst = project_folder_p / fname\n", + " try:\n", + " shutil.copy2(str(p), str(dst))\n", + " print(f\" ✓ Copied backdrop asset: {p} -> {dst}\")\n", + " except Exception as e:\n", + " print(f\" ✗ Failed to copy backdrop asset {p}: {repr(e)}\")\n", + " pj = matched_folder_p / \"project.json\"\n", + " if pj.exists() and pj.is_file():\n", + " try:\n", + " with pj.open(\"r\", encoding=\"utf-8\") as f:\n", + " bd_json = json.load(f)\n", + " for tgt in bd_json.get(\"targets\", []):\n", + " if tgt.get(\"isStage\"):\n", + " backdrop_data.append(tgt)\n", + " except Exception as e:\n", + " print(f\" ✗ Failed to read project.json in {matched_folder_display}: {repr(e)}\")\n", + "\n", + " # Final project JSON creation (same as your code)\n", + " final_project = {\n", + " \"targets\": [], \"monitors\": [], \"extensions\": [],\n", + " \"meta\": {\n", + " \"semver\": \"3.0.0\",\n", + " \"vm\": \"11.3.0\",\n", + " \"agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/138.0.0.0 Safari/537.36\"\n", + " }\n", + " }\n", + " for spr in project_data:\n", + " if not spr.get(\"isStage\", False):\n", + " final_project[\"targets\"].append(spr)\n", + " if backdrop_data:\n", + " all_costumes, sounds = [], []\n", + " seen_costumes = set()\n", + " for i, bd in enumerate(backdrop_data):\n", + " for costume in bd.get(\"costumes\", []):\n", + " key = (costume.get(\"name\"), costume.get(\"assetId\"))\n", + " if key not in seen_costumes:\n", + " seen_costumes.add(key)\n", + " all_costumes.append(costume)\n", + " if i == 0:\n", + " sounds = bd.get(\"sounds\", [])\n", + " stage_obj={\n", + " \"isStage\": True,\n", + " \"name\": \"Stage\",\n", + " \"objName\": \"Stage\",\n", + " \"variables\": {}, \n", + " \"lists\": {}, \n", + " \"broadcasts\": {},\n", + " \"blocks\": {}, \n", + " \"comments\": {},\n", + " \"currentCostume\": 1 if len(all_costumes) > 1 else 0,\n", + " \"costumes\": all_costumes,\n", + " \"sounds\": sounds,\n", + " \"volume\": 100, \n", + " \"layerOrder\": 0,\n", + " \"tempo\": 60, \n", + " \"videoTransparency\": 50,\n", + " \"videoState\": \"on\",\n", + " \"textToSpeechLanguage\": None\n", + " }\n", + " final_project[\"targets\"].insert(0, stage_obj)\n", + " else:\n", + " logger.warning(\"⚠️ No backdrop matched. 
Using default static backdrop.\")\n",
+    "        default_backdrop_path = BACKDROP_DIR / \"cd21514d0531fdffb22204e0ec5ed84a.svg\"\n",
+    "        default_backdrop_name = \"cd21514d0531fdffb22204e0ec5ed84a.svg\"\n",
+    "        default_backdrop_sound = BACKDROP_DIR / \"83a9787d4cb6f3b7632b4ddfebf74367.wav\"\n",
+    "        # keep the .wav filename for the sound asset so it matches the md5ext in the sounds entry below\n",
+    "        default_backdrop_sound_name = \"83a9787d4cb6f3b7632b4ddfebf74367.wav\"\n",
+    "        try:\n",
+    "            shutil.copy2(default_backdrop_path, os.path.join(project_folder, default_backdrop_name))\n",
+    "            logger.info(f\"✅ Default backdrop copied to project: {default_backdrop_name}\")\n",
+    "            shutil.copy2(default_backdrop_sound, os.path.join(project_folder, default_backdrop_sound_name))\n",
+    "            logger.info(f\"✅ Default backdrop sound copied to project: {default_backdrop_sound_name}\")\n",
+    "        except Exception as e:\n",
+    "            logger.error(f\"❌ Failed to copy default backdrop: {e}\")\n",
+    "        stage_obj={\n",
+    "            \"isStage\": True,\n",
+    "            \"name\": \"Stage\",\n",
+    "            \"objName\": \"Stage\",\n",
+    "            \"variables\": {},\n",
+    "            \"lists\": {},\n",
+    "            \"broadcasts\": {},\n",
+    "            \"blocks\": {},\n",
+    "            \"comments\": {},\n",
+    "            \"currentCostume\": 0,\n",
+    "            \"costumes\": [\n",
+    "                {\n",
+    "                    \"assetId\": default_backdrop_name.split(\".\")[0],\n",
+    "                    \"name\": \"defaultBackdrop\",\n",
+    "                    \"md5ext\": default_backdrop_name,\n",
+    "                    \"dataFormat\": \"svg\",\n",
+    "                    \"rotationCenterX\": 240,\n",
+    "                    \"rotationCenterY\": 180\n",
+    "                }\n",
+    "            ],\n",
+    "            \"sounds\": [\n",
+    "                {\n",
+    "                    \"name\": \"pop\",\n",
+    "                    \"assetId\": \"83a9787d4cb6f3b7632b4ddfebf74367\",\n",
+    "                    \"dataFormat\": \"wav\",\n",
+    "                    \"format\": \"\",\n",
+    "                    \"rate\": 48000,\n",
+    "                    \"sampleCount\": 1123,\n",
+    "                    \"md5ext\": \"83a9787d4cb6f3b7632b4ddfebf74367.wav\"\n",
+    "                }\n",
+    "            ],\n",
+    "            \"volume\": 100,\n",
+    "            \"layerOrder\": 0,\n",
+    "            \"tempo\": 60,\n",
+    "            \"videoTransparency\": 50,\n",
+    "            \"videoState\": \"on\",\n",
+    "            \"textToSpeechLanguage\": None\n",
+    "        }\n",
+    "        final_project[\"targets\"].insert(0, stage_obj)\n",
+    "\n",
+    "    with open(project_json_path, 'w') as f:\n",
+    "        json.dump(final_project, f, indent=2)\n",
+    "\n",
+    "    return project_json_path"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 44,
+   "id": "efab842d",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "🔍 Running similarity matching…\n",
+      "\n",
+      "--- Query/Search Phase ---\n",
+      "\n",
+      "Top results by DINOv2 Embeddings:\n",
+      "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jordyn.sprite3\\db4d97cbf24e2b8af665bfbf06f67fa0.png | Cosine: 0.5860\n",
+      "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Ben.sprite3\\9f9f88aea3457084d8d734040b0b9067.png | Cosine: 0.5819\n",
+      "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Ben.sprite3\\2cd77b8a9961e7ad4da905e7731b7c1b.png | Cosine: 0.5727\n",
+      "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Kiran.sprite3\\b0566e0eed7b5216b92d61468d21ecee.png | Cosine: 0.5724\n",
+      "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Kiran.sprite3\\7c0bedab5404830a5147cc4a2d46e997.png | Cosine: 0.5715\n",
+      "\n",
+      "Top results by PHash (Hamming distance & normalized sim):\n",
+      "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Bear-walking.sprite3\\f36c80d2e731be95df7ec6d07f89fa00.png | Hamming: 18 | NormSim: 0.7188\n",
+      "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Ten80 
Dance.sprite3\\e06ac61e96e3a5abf4ca0863816f5d28.png | Hamming: 18 | NormSim: 0.7188\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Bear-walking.sprite3\\6d4d06e3f4cd0c9455b777b9a40782b6.png | Hamming: 20 | NormSim: 0.6875\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Drums Conga.sprite3\\2b2eacfce0fb1af023e6ca0f5ef6defe.png | Hamming: 20 | NormSim: 0.6875\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Glow-7.sprite3\\6194b9a251a905d0001a969990961724.png | Hamming: 20 | NormSim: 0.6875\n", + "\n", + "Top results by ImageSignature (normalized similarity = 1 - distance):\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Arctic.sb3\\67e0db3305b3c8bac3a363b1c428892e.png | NormDist: None | NormSim: 0.0000\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Baseball 1.sb3\\825d9b54682c406215d9d1f98a819449.png | NormDist: None | NormSim: 0.0000\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Baseball 2.sb3\\7be1f5b3e682813dac1f297e52ff7dca.png | NormDist: None | NormSim: 0.0000\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Basketball 1.sb3\\ae21eac3d1814aee1d37ae82ea287816.png | NormDist: None | NormSim: 0.0000\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Basketball 2.sb3\\a5865738283613a2725b2c9dda6d8c78.png | NormDist: None | NormSim: 0.0000\n", + "\n", + "Top results by Combined Score (avg of embedding|phash|image-match):\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jordyn.sprite3\\db4d97cbf24e2b8af665bfbf06f67fa0.png | Combined: 0.3933 | emb: 0.5860 | phash_sim: 0.5938 | imgmatch_sim: 0.0000\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Ben.sprite3\\2cd77b8a9961e7ad4da905e7731b7c1b.png | Combined: 0.3888 | emb: 0.5727 | phash_sim: 0.5938 | imgmatch_sim: 0.0000\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Kiran.sprite3\\2928e9fbd5ca08e326192b3a41bea691.png | Combined: 0.3876 | emb: 0.4753 | phash_sim: 0.6875 | imgmatch_sim: 0.0000\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Ben.sprite3\\165d993c30dfdb9e829d0d98867d7826.png | Combined: 0.3856 | emb: 0.5318 | phash_sim: 0.6250 | imgmatch_sim: 0.0000\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Tera.sprite3\\5456a723f3b35eaa946b974a59888793.png | Combined: 0.3836 | emb: 0.4947 | phash_sim: 0.6562 | imgmatch_sim: 0.0000\n", + "\n", + "Search complete.\n", + "\n", + "Top by Weighted Normalized Average (weights emb,ph,img = 0.50,0.30,0.20):\n", + " 1. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jordyn.sprite3\\db4d97cbf24e2b8af665bfbf06f67fa0.png score=0.9077 emb=1.000 ph=0.692 im=1.000\n", + "\n", + "Top by Rank-sum (lower is better):\n", + " 1. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Avery Walking.sprite3\\dc6a584704c09a3fbafb9825635a9fd4.png rank_sum=208 emb_rank=50 ph_rank=29 img_rank=129\n", + "\n", + "Top by Harmonic mean (requires non-zero on all metrics):\n", + " 1. 
D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Kiran.sprite3\\2928e9fbd5ca08e326192b3a41bea691.png harm=0.9046 emb=0.811 ph=0.923 im=1.000\n", + "\n", + "Consensus (in top-1 of ALL metrics): []\n", + "Sprite '['Sprite 1', 'Sprite 2', 'Sprite 3', 'Sprite 4']' FINAL selected: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jordyn.sprite3\\db4d97cbf24e2b8af665bfbf06f67fa0.png (index 570) score=0.9077\n", + "\n", + "--- Query/Search Phase ---\n", + "\n", + "Top results by DINOv2 Embeddings:\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Colorful City.sb3\\04d18ddd1b85f0ea30beb14b8da49f60.png | Cosine: 0.6968\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Farm.sb3\\1e8a70bd07f1dcba3383883f3b948266.png | Cosine: 0.5453\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Space City 1.sb3\\20344b0edcc498281e4cb80242a72667.png | Cosine: 0.5114\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Wetland.sb3\\ef9973bcff6d4cbc558e946028ec7d23.png | Cosine: 0.4301\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Room 1.sb3\\a81668321aa3dcc0fc185d3e36ae76f6.png | Cosine: 0.4222\n", + "\n", + "Top results by PHash (Hamming distance & normalized sim):\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Colorful City.sb3\\04d18ddd1b85f0ea30beb14b8da49f60.png | Hamming: 12 | NormSim: 0.8125\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Dinosaur5.sprite3\\9d200a7c2e93eac8cf52ede3a87d7969.png | Hamming: 16 | NormSim: 0.7500\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Champ99.sprite3\\26fdff424232926001d20041c3d5673b.png | Hamming: 18 | NormSim: 0.7188\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Marian.sprite3\\e9577a1eb098905dd386135bb38c0398.png | Hamming: 18 | NormSim: 0.7188\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Dinosaur5.sprite3\\5882227a9e2f0f3b2014c49328969762.png | Hamming: 20 | NormSim: 0.6875\n", + "\n", + "Top results by ImageSignature (normalized similarity = 1 - distance):\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Arctic.sb3\\67e0db3305b3c8bac3a363b1c428892e.png | NormDist: None | NormSim: 0.0000\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Baseball 1.sb3\\825d9b54682c406215d9d1f98a819449.png | NormDist: None | NormSim: 0.0000\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Baseball 2.sb3\\7be1f5b3e682813dac1f297e52ff7dca.png | NormDist: None | NormSim: 0.0000\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Basketball 1.sb3\\ae21eac3d1814aee1d37ae82ea287816.png | NormDist: None | NormSim: 0.0000\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Basketball 2.sb3\\a5865738283613a2725b2c9dda6d8c78.png | NormDist: None | NormSim: 0.0000\n", + "\n", + "Top results by Combined Score (avg of embedding|phash|image-match):\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Colorful City.sb3\\04d18ddd1b85f0ea30beb14b8da49f60.png | Combined: 0.5031 | emb: 0.6968 | phash_sim: 0.8125 | imgmatch_sim: 0.0000\n", + "Rank 2: D:\\DEV 
PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Farm.sb3\\1e8a70bd07f1dcba3383883f3b948266.png | Combined: 0.3484 | emb: 0.5453 | phash_sim: 0.5000 | imgmatch_sim: 0.0000\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Taco.sprite3\\c97113d17afeaac9f461ea0ec257ef26.png | Combined: 0.3475 | emb: 0.3551 | phash_sim: 0.6875 | imgmatch_sim: 0.0000\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Room 1.sb3\\a81668321aa3dcc0fc185d3e36ae76f6.png | Combined: 0.3387 | emb: 0.4222 | phash_sim: 0.5938 | imgmatch_sim: 0.0000\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Dinosaur5.sprite3\\9d200a7c2e93eac8cf52ede3a87d7969.png | Combined: 0.3357 | emb: 0.2571 | phash_sim: 0.7500 | imgmatch_sim: 0.0000\n", + "\n", + "Search complete.\n", + "\n", + "Top by Weighted Normalized Average (weights emb,ph,img = 0.50,0.30,0.20):\n", + " 1. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Colorful City.sb3\\04d18ddd1b85f0ea30beb14b8da49f60.png score=1.0000 emb=1.000 ph=1.000 im=1.000\n", + "\n", + "Top by Rank-sum (lower is better):\n", + " 1. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Colorful City.sb3\\04d18ddd1b85f0ea30beb14b8da49f60.png rank_sum=25 emb_rank=1 ph_rank=1 img_rank=23\n", + "\n", + "Top by Harmonic mean (requires non-zero on all metrics):\n", + " 1. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Colorful City.sb3\\04d18ddd1b85f0ea30beb14b8da49f60.png harm=1.0000 emb=1.000 ph=1.000 im=1.000\n", + "\n", + "Consensus (in top-1 of ALL metrics): []\n", + "Sprite '['Sprite 1', 'Sprite 2', 'Sprite 3', 'Sprite 4']' FINAL selected: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Colorful City.sb3\\04d18ddd1b85f0ea30beb14b8da49f60.png (index 22) score=1.0000\n", + "\n", + "--- Query/Search Phase ---\n", + "\n", + "Top results by DINOv2 Embeddings:\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white2.jpg | Cosine: 0.6674\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white.png | Cosine: 0.6004\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Blue Sky 2.sb3\\8eb8790be5507fdccf73e7c1570bbbab.png | Cosine: 0.5916\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Button3.sprite3\\a3b357ea21773bcb3545a227ee877e9a.png | Cosine: 0.4592\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Line.sprite3\\e85305b47cfd92d971704dcb7ad6e17b.png | Cosine: 0.4415\n", + "\n", + "Top results by PHash (Hamming distance & normalized sim):\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white2.jpg | Hamming: 6 | NormSim: 0.9062\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Drums Conga.sprite3\\bdad2f140cfbd021f38241fc9acc7fd2.png | Hamming: 16 | NormSim: 0.7500\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Anina Dance.sprite3\\d86bb27b4f8d7b70c39c96f29c6943b4.png | Hamming: 20 | NormSim: 0.6875\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Butterfly 2.sprite3\\e96f4c6913107c9b790d37bb65507c14.png | Hamming: 20 | NormSim: 0.6875\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Button1.sprite3\\21fb7fa07eac4794fded0be4e18e20a2.png | Hamming: 20 | NormSim: 0.6875\n", + "\n", + "Top results by 
ImageSignature (normalized similarity = 1 - distance):\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Arctic.sb3\\67e0db3305b3c8bac3a363b1c428892e.png | NormDist: None | NormSim: 0.0000\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Baseball 1.sb3\\825d9b54682c406215d9d1f98a819449.png | NormDist: None | NormSim: 0.0000\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Baseball 2.sb3\\7be1f5b3e682813dac1f297e52ff7dca.png | NormDist: None | NormSim: 0.0000\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Basketball 1.sb3\\ae21eac3d1814aee1d37ae82ea287816.png | NormDist: None | NormSim: 0.0000\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Basketball 2.sb3\\a5865738283613a2725b2c9dda6d8c78.png | NormDist: None | NormSim: 0.0000\n", + "\n", + "Top results by Combined Score (avg of embedding|phash|image-match):\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white2.jpg | Combined: 0.5246 | emb: 0.6674 | phash_sim: 0.9062 | imgmatch_sim: 0.0000\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Blue Sky 2.sb3\\8eb8790be5507fdccf73e7c1570bbbab.png | Combined: 0.3847 | emb: 0.5916 | phash_sim: 0.5625 | imgmatch_sim: 0.0000\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white.png | Combined: 0.3564 | emb: 0.6004 | phash_sim: 0.4688 | imgmatch_sim: 0.0000\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Line.sprite3\\e85305b47cfd92d971704dcb7ad6e17b.png | Combined: 0.3451 | emb: 0.4415 | phash_sim: 0.5938 | imgmatch_sim: 0.0000\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Clouds.sprite3\\9f5958f46d21e33d3f6d7caffbe0daa9.png | Combined: 0.3413 | emb: 0.3676 | phash_sim: 0.6562 | imgmatch_sim: 0.0000\n", + "\n", + "Search complete.\n", + "\n", + "Top by Weighted Normalized Average (weights emb,ph,img = 0.50,0.30,0.20):\n", + " 1. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white2.jpg score=1.0000 emb=1.000 ph=1.000 im=1.000\n", + "\n", + "Top by Rank-sum (lower is better):\n", + " 1. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white2.jpg rank_sum=100 emb_rank=1 ph_rank=1 img_rank=98\n", + "\n", + "Top by Harmonic mean (requires non-zero on all metrics):\n", + " 1. 
D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white2.jpg harm=1.0000 emb=1.000 ph=1.000 im=1.000\n", + "\n", + "Consensus (in top-1 of ALL metrics): []\n", + "Sprite '['Sprite 1', 'Sprite 2', 'Sprite 3', 'Sprite 4']' FINAL selected: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white2.jpg (index 990) score=1.0000\n", + "\n", + "--- Query/Search Phase ---\n", + "\n", + "Top results by DINOv2 Embeddings:\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\script9.JPG | Cosine: 0.6961\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\script1.JPG | Cosine: 0.6645\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\script8.JPG | Cosine: 0.6561\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\script3.JPG | Cosine: 0.6487\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\script2.JPG | Cosine: 0.6342\n", + "\n", + "Top results by PHash (Hamming distance & normalized sim):\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Princess.sprite3\\23330150c0a09180083b597cbfeca99a.png | Hamming: 20 | NormSim: 0.6875\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Story-A.sprite3\\4b1beecd9a8892df0918242b2b5fbd4c.png | Hamming: 20 | NormSim: 0.6875\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Anina Dance.sprite3\\d86bb27b4f8d7b70c39c96f29c6943b4.png | Hamming: 22 | NormSim: 0.6562\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Arrow1.sprite3\\dafcdfda65af14e172809984710f31a9.png | Hamming: 22 | NormSim: 0.6562\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Block-J.sprite3\\8580c990ac918577550165447f870542.png | Hamming: 22 | NormSim: 0.6562\n", + "\n", + "Top results by ImageSignature (normalized similarity = 1 - distance):\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Arctic.sb3\\67e0db3305b3c8bac3a363b1c428892e.png | NormDist: None | NormSim: 0.0000\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Baseball 1.sb3\\825d9b54682c406215d9d1f98a819449.png | NormDist: None | NormSim: 0.0000\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Baseball 2.sb3\\7be1f5b3e682813dac1f297e52ff7dca.png | NormDist: None | NormSim: 0.0000\n", + "Rank 4: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Basketball 1.sb3\\ae21eac3d1814aee1d37ae82ea287816.png | NormDist: None | NormSim: 0.0000\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Basketball 2.sb3\\a5865738283613a2725b2c9dda6d8c78.png | NormDist: None | NormSim: 0.0000\n", + "\n", + "Top results by Combined Score (avg of embedding|phash|image-match):\n", + "Rank 1: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\client_code_block_1.jpg | Combined: 0.4137 | emb: 0.5850 | phash_sim: 0.6562 | imgmatch_sim: 0.0000\n", + "Rank 2: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\script9.JPG | Combined: 0.3883 | emb: 0.6961 | phash_sim: 0.4688 | imgmatch_sim: 0.0000\n", + "Rank 3: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\script5.JPG | Combined: 0.3879 | emb: 0.6011 | phash_sim: 0.5625 | imgmatch_sim: 0.0000\n", + "Rank 4: D:\\DEV 
PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\script7.JPG | Combined: 0.3833 | emb: 0.6186 | phash_sim: 0.5312 | imgmatch_sim: 0.0000\n", + "Rank 5: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\script1.JPG | Combined: 0.3777 | emb: 0.6645 | phash_sim: 0.4688 | imgmatch_sim: 0.0000\n", + "\n", + "Search complete.\n", + "\n", + "Top by Weighted Normalized Average (weights emb,ph,img = 0.50,0.30,0.20):\n", + " 1. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\client_code_block_1.jpg score=0.8971 emb=0.840 ph=0.923 im=1.000\n", + "\n", + "Top by Rank-sum (lower is better):\n", + " 1. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\client_code_block_1.jpg rank_sum=98 emb_rank=9 ph_rank=3 img_rank=86\n", + "\n", + "Top by Harmonic mean (requires non-zero on all metrics):\n", + " 1. D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\client_code_block_1.jpg harm=0.9165 emb=0.840 ph=0.923 im=1.000\n", + "\n", + "Consensus (in top-1 of ALL metrics): []\n", + "Sprite '['Sprite 1', 'Sprite 2', 'Sprite 3', 'Sprite 4']' FINAL selected: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\client_code_block_1.jpg (index 978) score=0.8971\n", + "matched_indices------------------> [22, 570, 978, 990]\n", + "Processing matched image: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Colorful City.sb3\\04d18ddd1b85f0ea30beb14b8da49f60.png\n", + " - Folder: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Colorful City.sb3\n", + "Processing BACKDROP folder: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Colorful City.sb3\n", + " ✓ Copied matched backdrop image: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Colorful City.sb3\\04d18ddd1b85f0ea30beb14b8da49f60.png -> D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\test\\04d18ddd1b85f0ea30beb14b8da49f60.png\n", + " Files in backdrop folder: ['04d18ddd1b85f0ea30beb14b8da49f60.png', '83a9787d4cb6f3b7632b4ddfebf74367.wav', 'cd21514d0531fdffb22204e0ec5ed84a.svg', 'project.json']\n", + " ✓ Copied backdrop asset: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Colorful City.sb3\\83a9787d4cb6f3b7632b4ddfebf74367.wav -> D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\test\\83a9787d4cb6f3b7632b4ddfebf74367.wav\n", + " ✓ Copied backdrop asset: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\Backdrops\\Colorful City.sb3\\cd21514d0531fdffb22204e0ec5ed84a.svg -> D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\test\\cd21514d0531fdffb22204e0ec5ed84a.svg\n", + "Processing matched image: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jordyn.sprite3\\db4d97cbf24e2b8af665bfbf06f67fa0.png\n", + " - Folder: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jordyn.sprite3\n", + "Processing SPRITE folder: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jordyn.sprite3\n", + " ✓ Successfully read sprite.json from D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jordyn.sprite3\n", + " Files in sprite folder: ['00c8c464c19460df693f8d5ae69afdab.png', '00c8c464c19460df693f8d5ae69afdab.svg', '199b30c8b4fe0642e849924bd1e1b463.wav', '768c4601174f0dfcb96b3080ccc3a192.png', '768c4601174f0dfcb96b3080ccc3a192.svg', '7d91d95d841dc6cf1282914306a4674a.wav', 'a7cc1e5f02b58ecc8095cfc18eef0289.png', 'a7cc1e5f02b58ecc8095cfc18eef0289.svg', 
'db4d97cbf24e2b8af665bfbf06f67fa0.png', 'db4d97cbf24e2b8af665bfbf06f67fa0.svg', 'sprite.json']\n", + " ✓ Copied sprite asset: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jordyn.sprite3\\00c8c464c19460df693f8d5ae69afdab.png -> D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\test\\00c8c464c19460df693f8d5ae69afdab.png\n", + " ✓ Copied sprite asset: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jordyn.sprite3\\00c8c464c19460df693f8d5ae69afdab.svg -> D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\test\\00c8c464c19460df693f8d5ae69afdab.svg\n", + " ✓ Copied sprite asset: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jordyn.sprite3\\199b30c8b4fe0642e849924bd1e1b463.wav -> D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\test\\199b30c8b4fe0642e849924bd1e1b463.wav\n", + " ✓ Copied sprite asset: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jordyn.sprite3\\768c4601174f0dfcb96b3080ccc3a192.png -> D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\test\\768c4601174f0dfcb96b3080ccc3a192.png\n", + " ✓ Copied sprite asset: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jordyn.sprite3\\768c4601174f0dfcb96b3080ccc3a192.svg -> D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\test\\768c4601174f0dfcb96b3080ccc3a192.svg\n", + " ✓ Copied sprite asset: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jordyn.sprite3\\7d91d95d841dc6cf1282914306a4674a.wav -> D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\test\\7d91d95d841dc6cf1282914306a4674a.wav\n", + " ✓ Copied sprite asset: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jordyn.sprite3\\a7cc1e5f02b58ecc8095cfc18eef0289.png -> D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\test\\a7cc1e5f02b58ecc8095cfc18eef0289.png\n", + " ✓ Copied sprite asset: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jordyn.sprite3\\a7cc1e5f02b58ecc8095cfc18eef0289.svg -> D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\test\\a7cc1e5f02b58ecc8095cfc18eef0289.svg\n", + " ✓ Copied sprite asset: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\sprites\\Jordyn.sprite3\\db4d97cbf24e2b8af665bfbf06f67fa0.svg -> D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\out_json\\test\\db4d97cbf24e2b8af665bfbf06f67fa0.svg\n", + "Processing matched image: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\client_code_block_1.jpg\n", + " - Folder: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\n", + "Processing matched image: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\\white2.jpg\n", + " - Folder: D:\\DEV PATEL\\2025\\scratch_VLM\\scratch_agent\\blocks\\code_blocks\n" + ] + } + ], + "source": [ + "project_output = similarity_matching(output_path, project_folder)" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "640c44ca", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "equal? False\n", + "p_rel is absolute? False\n", + "p_abs is absolute? False\n", + "p_rel is NOT inside p_abs\n" + ] + } + ], + "source": [ + "from pathlib import Path\n", + "\n", + "p_rel = Path(\"app/blocks/Backdrops/Castle 2.sb3\")\n", + "p_abs = Path(\"/app/blocks/Backdrops/Castle 2.sb3\")\n", + "\n", + "print(\"equal? 
\", p_rel == p_abs) # False\n", + "print(\"p_rel is absolute?\", p_rel.is_absolute())# False\n", + "print(\"p_abs is absolute?\", p_abs.is_absolute())# True\n", + "\n", + "# robust membership test (py3.9+)\n", + "try:\n", + " p_rel.relative_to(p_abs)\n", + " print(\"p_rel is inside p_abs\")\n", + "except Exception:\n", + " print(\"p_rel is NOT inside p_abs\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c64bd46a", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "scratch_env", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.11" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +}