{ "cells": [ { "cell_type": "markdown", "id": "8d8da681", "metadata": {}, "source": [ "# Teleagriculture API Tests\n", "\n", "This notebook tests API endpoints to find the board with the most data points." ] }, { "cell_type": "code", "execution_count": 25, "id": "45dc5eca", "metadata": {}, "outputs": [], "source": [ "# Import required libraries\n", "import requests\n", "import json\n", "import pandas as pd\n", "import matplotlib.pyplot as plt\n", "from typing import List, Dict, Optional\n", "from datetime import datetime" ] }, { "cell_type": "markdown", "id": "f61e398c", "metadata": {}, "source": [ "## API Configuration\n", "\n", "Based on the teleagriculture project documentation, these are IoT hardware boards that send data to cloud platforms. This notebook demonstrates how to query a data platform that collects data from multiple teleagriculture boards." ] }, { "cell_type": "code", "execution_count": 26, "id": "0f5ac5fe", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "API: https://kits.teleagriculture.org/api\n", "Auth: none\n" ] } ], "source": [ "# API Configuration for Teleagriculture Kits API (minimal)\n", "BASE_URL = \"https://kits.teleagriculture.org/api\" # official kits API base\n", "\n", "# Optional: put KIT_API_KEY in env to POST; GETs are public per docs (but docs also mention bearer header; we support both)\n", "import os\n", "KIT_API_KEY = os.getenv(\"KIT_API_KEY\")\n", "\n", "HEADERS = {\n", " \"Accept\": \"application/json\",\n", "}\n", "if KIT_API_KEY:\n", " HEADERS[\"Authorization\"] = f\"Bearer {KIT_API_KEY}\"\n", "\n", "print(\"API:\", BASE_URL)\n", "print(\"Auth:\", \"Bearer set\" if \"Authorization\" in HEADERS else \"none\")" ] }, { "cell_type": "code", "execution_count": 27, "id": "9e43c541", "metadata": {}, "outputs": [], "source": [ "# Minimal helpers per official docs\n", "from typing import Tuple, Optional\n", "\n", "def get_kit_info(kit_id: int) -> Optional[dict]:\n", " url = f\"{BASE_URL}/kits/{kit_id}\"\n", " try:\n", " r = requests.get(url, headers=HEADERS, timeout=30)\n", " if r.status_code == 200:\n", " return r.json().get(\"data\")\n", " return None\n", " except requests.RequestException:\n", " return None\n", "\n", "\n", "def count_sensor_measurements(kit_id: int, sensor_name: str, page_size: int = 50, max_pages: int = 200) -> int:\n", " \"\"\"Count all measurements for a kit sensor using cursor pagination.\n", " Limits pages to avoid unbounded runs.\n", " \"\"\"\n", " total = 0\n", " cursor = None\n", " pages = 0\n", " while pages < max_pages:\n", " params = {\"page[size]\": str(page_size)}\n", " if cursor:\n", " params[\"page[cursor]\"] = cursor\n", " url = f\"{BASE_URL}/kits/{kit_id}/{sensor_name}/measurements\"\n", " try:\n", " r = requests.get(url, headers=HEADERS, params=params, timeout=30)\n", " except requests.RequestException:\n", " break\n", " if r.status_code == 404:\n", " break\n", " if r.status_code != 200:\n", " break\n", " try:\n", " body = r.json()\n", " except Exception:\n", " break\n", " data = body.get(\"data\")\n", " if isinstance(data, list):\n", " total += len(data)\n", " else:\n", " break\n", " meta = body.get(\"meta\", {})\n", " cursor = meta.get(\"next_cursor\")\n", " pages += 1\n", " if not cursor:\n", " break\n", " return total" ] }, { "cell_type": "markdown", "id": "3b944747", "metadata": {}, "source": [ "## Fetch Boards Function\n", "\n", "Function to retrieve all registered teleagriculture boards from the data platform API. 
Each \"board\" represents a deployed IoT device collecting agricultural data." ] }, { "cell_type": "markdown", "id": "43460b20", "metadata": {}, "source": [ "## Fetch all sensors for a kit and count in parallel\n", "\n", "Minimal helpers to grab all sensors from one kit and count each sensor’s datapoints concurrently." ] }, { "cell_type": "code", "execution_count": 28, "id": "bde9a436", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "KIT 1001 BEST {'sensor': 'NH3', 'count': 1221}\n" ] } ], "source": [ "from concurrent.futures import ThreadPoolExecutor, as_completed\n", "\n", "\n", "def get_kit_sensors(kit_id: int) -> list[dict]:\n", " kit = get_kit_info(kit_id)\n", " if not kit:\n", " return []\n", " sensors = kit.get(\"sensors\") or []\n", " # normalize: keep only id and name if present\n", " out = []\n", " for s in sensors:\n", " if isinstance(s, dict) and s.get(\"name\"):\n", " out.append({\"id\": s.get(\"id\"), \"name\": s.get(\"name\")})\n", " return out\n", "\n", "\n", "def count_all_sensors_for_kit(kit_id: int, page_size: int = 50, max_workers: int = 8) -> dict:\n", " sensors = get_kit_sensors(kit_id)\n", " if not sensors:\n", " return {\"kit_id\": kit_id, \"counts\": {}, \"best\": None}\n", "\n", " counts: dict[str, int] = {}\n", " best = {\"sensor\": None, \"count\": -1}\n", "\n", " def _worker(sname: str) -> tuple[str, int]:\n", " c = count_sensor_measurements(kit_id, sname, page_size=page_size)\n", " return sname, c\n", "\n", " with ThreadPoolExecutor(max_workers=max_workers) as ex:\n", " futures = {ex.submit(_worker, s[\"name\"]): s[\"name\"] for s in sensors}\n", " for fut in as_completed(futures):\n", " sname = futures[fut]\n", " try:\n", " sname, c = fut.result()\n", " counts[sname] = c\n", " if c > best[\"count\"]:\n", " best = {\"sensor\": sname, \"count\": c}\n", " except Exception:\n", " counts[sname] = 0\n", " return {\"kit_id\": kit_id, \"counts\": counts, \"best\": best}\n", "\n", "# minimal run example (change the kit id here)\n", "one_kit_result = count_all_sensors_for_kit(1001, page_size=50)\n", "print(\"KIT\", one_kit_result[\"kit_id\"], \"BEST\", one_kit_result[\"best\"])" ] }, { "cell_type": "code", "execution_count": 29, "id": "76457d0a", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "πŸ“‘ Board fetching function defined successfully!\n", "🌿 Ready to query teleagriculture board data from platform API.\n" ] } ], "source": [ "def fetch_all_boards() -> List[Dict]:\n", " \"\"\"\n", " Fetch all registered teleagriculture boards from the data platform.\n", " \n", " Returns:\n", " List[Dict]: List of board objects with metadata, or empty list if error occurs\n", " \"\"\"\n", " try:\n", " # Common API endpoints for IoT platforms that might host teleagriculture data\n", " possible_endpoints = [\n", " \"/devices\", # Common IoT platform endpoint\n", " \"/boards\", # Board-specific endpoint\n", " \"/nodes\", # LoRaWAN nodes\n", " \"/sensors\", # Sensor networks\n", " \"/stations\" # Weather/agri stations\n", " ]\n", " \n", " for endpoint in possible_endpoints:\n", " try:\n", " url = f\"{BASE_URL}{endpoint}\"\n", " response = requests.get(url, headers=HEADERS, timeout=30)\n", " \n", " if response.status_code == 200:\n", " data = response.json()\n", " \n", " # Handle different response formats\n", " if isinstance(data, list):\n", " boards = data\n", " elif isinstance(data, dict):\n", " # Try common keys for device arrays\n", " for key in ['devices', 'boards', 'nodes', 'sensors', 'stations', 'data', 
'results']:\n", " if key in data and isinstance(data[key], list):\n", " boards = data[key]\n", " break\n", " else:\n", " boards = []\n", " else:\n", " boards = []\n", " \n", " if boards:\n", " print(f\"βœ… Successfully fetched {len(boards)} boards from {endpoint}\")\n", " return boards\n", " \n", " except Exception as e:\n", " continue # Try next endpoint\n", " \n", " print(\"❌ Could not find boards at any common endpoint\")\n", " return []\n", " \n", " except requests.exceptions.RequestException as e:\n", " print(f\"❌ Network error: {e}\")\n", " return []\n", " except json.JSONDecodeError as e:\n", " print(f\"❌ JSON decode error: {e}\")\n", " return []\n", " except Exception as e:\n", " print(f\"❌ Unexpected error: {e}\")\n", " return []\n", "\n", "# Test the function (will be used later)\n", "print(\"πŸ“‘ Board fetching function defined successfully!\")\n", "print(\"🌿 Ready to query teleagriculture board data from platform API.\")" ] }, { "cell_type": "markdown", "id": "ec646dfe", "metadata": {}, "source": [ "## Data Point Counting Function\n", "\n", "Function to count sensor data points collected by each teleagriculture board. This could include temperature readings, soil moisture, humidity, light levels, etc." ] }, { "cell_type": "code", "execution_count": 30, "id": "875bf5fc", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "πŸ“‘ Sensor data counting functions defined successfully!\n", "🌱 Ready to analyze agricultural sensor data from teleagriculture boards.\n" ] } ], "source": [ "def count_board_data_points(board_id: str) -> int:\n", " \"\"\"\n", " Count sensor data points collected by a specific teleagriculture board.\n", " \n", " Args:\n", " board_id (str): The ID of the teleagriculture board\n", " \n", " Returns:\n", " int: Number of data points (sensor readings) collected by the board\n", " \"\"\"\n", " try:\n", " # Teleagriculture boards typically send sensor data to these types of endpoints\n", " possible_endpoints = [\n", " f\"/devices/{board_id}/data\", # Device data endpoint\n", " f\"/devices/{board_id}/measurements\", # Measurement endpoint \n", " f\"/devices/{board_id}/sensors\", # Sensor readings\n", " f\"/boards/{board_id}/readings\", # Board readings\n", " f\"/nodes/{board_id}/uplinks\", # LoRaWAN uplink messages\n", " f\"/stations/{board_id}/observations\" # Weather station observations\n", " ]\n", " \n", " for endpoint in possible_endpoints:\n", " try:\n", " url = f\"{BASE_URL}{endpoint}\"\n", " response = requests.get(url, headers=HEADERS, timeout=30)\n", " \n", " if response.status_code == 200:\n", " data = response.json()\n", " \n", " # Handle different data formats from IoT platforms\n", " if isinstance(data, list):\n", " count = len(data)\n", " elif isinstance(data, dict):\n", " # Try common keys for sensor data arrays\n", " for key in ['measurements', 'readings', 'data', 'sensors', 'uplinks', 'observations', 'records']:\n", " if key in data and isinstance(data[key], list):\n", " count = len(data[key])\n", " break\n", " else:\n", " # Count sensor types if structured differently\n", " sensor_keys = ['temperature', 'humidity', 'pressure', 'soil_moisture', 'light', 'ph', 'nitrogen']\n", " count = sum(1 for key in sensor_keys if key in data and data[key] is not None)\n", " \n", " if count == 0:\n", " count = len(data) # Fallback to total keys\n", " else:\n", " count = 0\n", " \n", " print(f\"πŸ“Š Board {board_id}: {count} data points found via {endpoint}\")\n", " return count\n", " \n", " except Exception as e:\n", " continue # Try 
next endpoint\n", " \n", " print(f\"⚠️ Could not fetch sensor data for board {board_id}\")\n", " return 0\n", " \n", " except Exception as e:\n", " print(f\"❌ Error counting data points for board {board_id}: {e}\")\n", " return 0\n", "\n", "def get_board_data_counts(boards: List[Dict]) -> Dict[str, Dict]:\n", " \"\"\"\n", " Get sensor data counts for all teleagriculture boards.\n", " \n", " Args:\n", " boards (List[Dict]): List of board/device objects from the platform\n", " \n", " Returns:\n", " Dict[str, Dict]: Dictionary with board info and data counts\n", " \"\"\"\n", " board_stats = {}\n", " \n", " for board in boards:\n", " # Handle different IoT platform object structures\n", " board_id = (board.get('id') or board.get('device_id') or board.get('node_id') or \n", " board.get('sensor_id') or board.get('station_id') or board.get('_id'))\n", " \n", " board_name = (board.get('name') or board.get('device_name') or board.get('label') or \n", " board.get('title') or board.get('station_name') or f\"Board {board_id}\")\n", " \n", " # Get location info if available (common in agricultural IoT)\n", " location = board.get('location') or board.get('coordinates') or board.get('position')\n", " \n", " if board_id:\n", " data_count = count_board_data_points(str(board_id))\n", " board_stats[board_id] = {\n", " 'name': board_name,\n", " 'data_count': data_count,\n", " 'location': location,\n", " 'board_info': board\n", " }\n", " else:\n", " print(f\"⚠️ Skipping board without ID: {board}\")\n", " \n", " return board_stats\n", "\n", "print(\"πŸ“‘ Sensor data counting functions defined successfully!\")\n", "print(\"🌱 Ready to analyze agricultural sensor data from teleagriculture boards.\")" ] }, { "cell_type": "markdown", "id": "e101dd72", "metadata": {}, "source": [ "## Find Board with Most Data Points\n", "\n", "Main execution logic to analyze all boards and identify the one with the most data points." ] }, { "cell_type": "code", "execution_count": 31, "id": "2d6d95de", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "πŸš€ Starting board analysis...\n", "==================================================\n", "πŸ“‹ Fetching all boards...\n", "❌ Could not find boards at any common endpoint\n", "❌ No boards found or error occurred. Check your API configuration.\n", "❌ Could not find boards at any common endpoint\n", "❌ No boards found or error occurred. Check your API configuration.\n" ] } ], "source": [ "def find_board_with_most_data():\n", " \"\"\"\n", " Main function to find the board with the most data points.\n", " \"\"\"\n", " print(\"πŸš€ Starting board analysis...\")\n", " print(\"=\" * 50)\n", " \n", " # Step 1: Fetch all boards\n", " print(\"πŸ“‹ Fetching all boards...\")\n", " boards = fetch_all_boards()\n", " \n", " if not boards:\n", " print(\"❌ No boards found or error occurred. 
Check your API configuration.\")\n", " return None\n", " \n", " print(f\"βœ… Found {len(boards)} boards\")\n", " print()\n", " \n", " # Step 2: Count data points for each board\n", " print(\"πŸ“Š Counting data points for each board...\")\n", " board_stats = get_board_data_counts(boards)\n", " \n", " if not board_stats:\n", " print(\"❌ Could not get data counts for any boards.\")\n", " return None\n", " \n", " # Step 3: Find the board with the most data points\n", " max_board_id = max(board_stats.keys(), key=lambda k: board_stats[k]['data_count'])\n", " max_board = board_stats[max_board_id]\n", " \n", " print()\n", " print(\"πŸ† RESULTS\")\n", " print(\"=\" * 50)\n", " print(f\"Board with most data points:\")\n", " print(f\" πŸ“‹ Name: {max_board['name']}\")\n", " print(f\" πŸ†” ID: {max_board_id}\")\n", " print(f\" πŸ“Š Data Points: {max_board['data_count']}\")\n", " print()\n", " \n", " # Summary of all boards\n", " print(\"πŸ“‹ All Boards Summary:\")\n", " print(\"-\" * 30)\n", " sorted_boards = sorted(board_stats.items(), key=lambda x: x[1]['data_count'], reverse=True)\n", " \n", " for i, (board_id, stats) in enumerate(sorted_boards, 1):\n", " emoji = \"πŸ₯‡\" if i == 1 else \"πŸ₯ˆ\" if i == 2 else \"πŸ₯‰\" if i == 3 else \"πŸ“‹\"\n", " print(f\"{emoji} {stats['name']}: {stats['data_count']} data points\")\n", " \n", " return {\n", " 'winner': max_board,\n", " 'winner_id': max_board_id,\n", " 'all_stats': board_stats\n", " }\n", "\n", "# Execute the analysis\n", "result = find_board_with_most_data()" ] }, { "cell_type": "markdown", "id": "b191ccdf", "metadata": {}, "source": [ "## Data Visualization\n", "\n", "Create charts and detailed analysis of the board data points." ] }, { "cell_type": "markdown", "id": "30486ada", "metadata": {}, "source": [ "## Sensor Scan: IDs 1001–1060\n", "\n", "Iterate over sensor IDs 1001 to 1060, query the platform API, and find the sensor with the most datapoints." 
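, "\n\nHere each ID is a kit id: for every kit, the scan enumerates its sensors and counts each sensor's measurements using the helpers defined above. Before the full scan, the next cell is a small optional sketch of what a single paginated request looks like; it reuses BASE_URL, HEADERS, and the page[size]/page[cursor] parameters from the helpers, and kit 1001 with the pH sensor are example values taken from the output further below." ] }, { "cell_type": "code", "execution_count": null, "id": "a1b2c3d4", "metadata": {}, "outputs": [], "source": [ "# Quick illustrative sketch (not part of the scan): fetch one page of measurements\n", "# for a single kit sensor, using the same cursor pagination as count_sensor_measurements.\n", "# Kit 1001 and sensor 'pH' are example values seen in this notebook's output.\n", "r = requests.get(\n", "    f\"{BASE_URL}/kits/1001/pH/measurements\",\n", "    headers=HEADERS,\n", "    params={\"page[size]\": \"5\"},\n", "    timeout=30,\n", ")\n", "if r.status_code == 200:\n", "    body = r.json()\n", "    page = body.get(\"data\") or []\n", "    print(len(page), \"rows on this page; next cursor:\", body.get(\"meta\", {}).get(\"next_cursor\"))\n", "else:\n", "    print(\"Request failed with status\", r.status_code)"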
] }, { "cell_type": "code", "execution_count": 32, "id": "e860c05e", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "kit 1001 sensor ftTemp: 1219\n", "kit 1001 sensor gbHum: 1219\n", "kit 1001 sensor gbHum: 1219\n", "kit 1001 sensor gbTemp: 1219\n", "kit 1001 sensor gbTemp: 1219\n", "kit 1001 sensor Moist: 1219\n", "kit 1001 sensor Moist: 1219\n", "kit 1001 sensor CO: 1221\n", "kit 1001 sensor CO: 1221\n", "kit 1001 sensor NO2: 1221\n", "kit 1001 sensor NO2: 1221\n", "kit 1001 sensor NH3: 1221\n", "kit 1001 sensor NH3: 1221\n", "kit 1001 sensor C3H8: 1221\n", "kit 1001 sensor C3H8: 1221\n", "kit 1001 sensor C4H10: 1221\n", "kit 1001 sensor C4H10: 1221\n", "kit 1001 sensor CH4: 1221\n", "kit 1001 sensor CH4: 1221\n", "kit 1001 sensor H2: 1221\n", "kit 1001 sensor H2: 1221\n", "kit 1001 sensor C2H5OH: 1221\n", "kit 1001 sensor pH: 1219\n", "kit 1001 sensor NO3: 0\n", "kit 1001 sensor NO2_aq: 0\n", "kit 1001 sensor GH: 0\n", "kit 1001 sensor KH: 0\n", "kit 1001 sensor pH_strip: 0\n", "kit 1001 sensor Cl2: 0\n", "kit 1002 sensor ftTemp: 1218\n", "kit 1002 sensor gbHum: 1218\n", "kit 1002 sensor gbTemp: 1218\n", "kit 1002 sensor Moist: 1218\n", "kit 1002 sensor CO: 1218\n", "kit 1002 sensor NO2: 1218\n", "kit 1002 sensor NH3: 1218\n", "kit 1002 sensor C3H8: 1218\n", "kit 1002 sensor C4H10: 1218\n", "kit 1002 sensor CH4: 1218\n", "kit 1002 sensor H2: 1218\n", "kit 1002 sensor C2H5OH: 1218\n", "kit 1002 sensor pH: 1218\n", "kit 1002 sensor NO3: 0\n", "kit 1002 sensor NO2_aq: 0\n", "kit 1002 sensor GH: 0\n", "kit 1002 sensor KH: 0\n", "kit 1002 sensor pH_strip: 0\n", "kit 1002 sensor Cl2: 0\n", "kit 1002 sensor Battery: 663\n", "kit 1002 sensor temp: 1074\n", "kit 1003 sensor pH_strip: 2\n", "kit 1003 sensor temp2: 83\n", "kit 1003 sensor hum: 6000\n", "kit 1003 sensor temp: 4980\n", "kit 1003 sensor mois: 30\n", "kit 1003 sensor Battery: 210\n", "kit 1004 sensor ftTemp: 60\n", "kit 1004 sensor gbHum: 1548\n", "kit 1004 sensor gbTemp: 1547\n", "kit 1004 sensor Moist: 4658\n", "kit 1004 sensor Soil Moisture: 1210\n", "kit 1004 sensor NO2: 4658\n", "kit 1004 sensor NH3: 4658\n", "kit 1004 sensor C3H8: 4658\n", "kit 1004 sensor C4H10: 4658\n", "kit 1004 sensor CH4: 2880\n", "kit 1004 sensor H2: 4658\n", "kit 1004 sensor C2H5OH: 4658\n" ] }, { "ename": "KeyboardInterrupt", "evalue": "", "output_type": "error", "traceback": [ "\u001b[31m---------------------------------------------------------------------------\u001b[39m", "\u001b[31mKeyboardInterrupt\u001b[39m Traceback (most recent call last)", "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[32]\u001b[39m\u001b[32m, line 24\u001b[39m\n\u001b[32m 21\u001b[39m best = {\u001b[33m\"\u001b[39m\u001b[33mkit_id\u001b[39m\u001b[33m\"\u001b[39m: kit_id, \u001b[33m\"\u001b[39m\u001b[33msensor\u001b[39m\u001b[33m\"\u001b[39m: name, \u001b[33m\"\u001b[39m\u001b[33mcount\u001b[39m\u001b[33m\"\u001b[39m: cnt}\n\u001b[32m 22\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m best\n\u001b[32m---> \u001b[39m\u001b[32m24\u001b[39m best = \u001b[43mfind_max_sensor_in_range\u001b[49m\u001b[43m(\u001b[49m\u001b[32;43m1001\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[32;43m1060\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpage_size\u001b[49m\u001b[43m=\u001b[49m\u001b[32;43m50\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[32m 25\u001b[39m \u001b[38;5;28mprint\u001b[39m(\u001b[33m\"\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[33mRESULT\u001b[39m\u001b[33m\"\u001b[39m)\n\u001b[32m 
26\u001b[39m \u001b[38;5;28mprint\u001b[39m(best)\n", "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[32]\u001b[39m\u001b[32m, line 18\u001b[39m, in \u001b[36mfind_max_sensor_in_range\u001b[39m\u001b[34m(start_kit, end_kit, page_size)\u001b[39m\n\u001b[32m 16\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m name:\n\u001b[32m 17\u001b[39m \u001b[38;5;28;01mcontinue\u001b[39;00m\n\u001b[32m---> \u001b[39m\u001b[32m18\u001b[39m cnt = \u001b[43mcount_sensor_measurements\u001b[49m\u001b[43m(\u001b[49m\u001b[43mkit_id\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpage_size\u001b[49m\u001b[43m=\u001b[49m\u001b[43mpage_size\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 19\u001b[39m \u001b[38;5;28mprint\u001b[39m(\u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[33mkit \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mkit_id\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m sensor \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mname\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mcnt\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m\"\u001b[39m)\n\u001b[32m 20\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m cnt > best[\u001b[33m\"\u001b[39m\u001b[33mcount\u001b[39m\u001b[33m\"\u001b[39m]:\n", "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[27]\u001b[39m\u001b[32m, line 28\u001b[39m, in \u001b[36mcount_sensor_measurements\u001b[39m\u001b[34m(kit_id, sensor_name, page_size, max_pages)\u001b[39m\n\u001b[32m 26\u001b[39m url = \u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mBASE_URL\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m/kits/\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mkit_id\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m/\u001b[39m\u001b[38;5;132;01m{\u001b[39;00msensor_name\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m/measurements\u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m 27\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m---> \u001b[39m\u001b[32m28\u001b[39m r = \u001b[43mrequests\u001b[49m\u001b[43m.\u001b[49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[43murl\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mheaders\u001b[49m\u001b[43m=\u001b[49m\u001b[43mHEADERS\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[43m=\u001b[49m\u001b[43mparams\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m=\u001b[49m\u001b[32;43m30\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[32m 29\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m requests.RequestException:\n\u001b[32m 30\u001b[39m \u001b[38;5;28;01mbreak\u001b[39;00m\n", "\u001b[36mFile \u001b[39m\u001b[32m~/miniforge3/envs/random/lib/python3.13/site-packages/requests/api.py:73\u001b[39m, in \u001b[36mget\u001b[39m\u001b[34m(url, params, **kwargs)\u001b[39m\n\u001b[32m 62\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34mget\u001b[39m(url, params=\u001b[38;5;28;01mNone\u001b[39;00m, **kwargs):\n\u001b[32m 63\u001b[39m \u001b[38;5;250m \u001b[39m\u001b[33mr\u001b[39m\u001b[33;03m\"\"\"Sends a GET request.\u001b[39;00m\n\u001b[32m 64\u001b[39m \n\u001b[32m 65\u001b[39m \u001b[33;03m :param url: URL for the new :class:`Request` object.\u001b[39;00m\n\u001b[32m (...)\u001b[39m\u001b[32m 70\u001b[39m \u001b[33;03m :rtype: requests.Response\u001b[39;00m\n\u001b[32m 71\u001b[39m \u001b[33;03m \"\"\"\u001b[39;00m\n\u001b[32m---> \u001b[39m\u001b[32m73\u001b[39m 
\u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mrequest\u001b[49m\u001b[43m(\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43mget\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43murl\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[43m=\u001b[49m\u001b[43mparams\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", "\u001b[36mFile \u001b[39m\u001b[32m~/miniforge3/envs/random/lib/python3.13/site-packages/requests/api.py:59\u001b[39m, in \u001b[36mrequest\u001b[39m\u001b[34m(method, url, **kwargs)\u001b[39m\n\u001b[32m 55\u001b[39m \u001b[38;5;66;03m# By using the 'with' statement we are sure the session is closed, thus we\u001b[39;00m\n\u001b[32m 56\u001b[39m \u001b[38;5;66;03m# avoid leaving sockets open which can trigger a ResourceWarning in some\u001b[39;00m\n\u001b[32m 57\u001b[39m \u001b[38;5;66;03m# cases, and look like a memory leak in others.\u001b[39;00m\n\u001b[32m 58\u001b[39m \u001b[38;5;28;01mwith\u001b[39;00m sessions.Session() \u001b[38;5;28;01mas\u001b[39;00m session:\n\u001b[32m---> \u001b[39m\u001b[32m59\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43msession\u001b[49m\u001b[43m.\u001b[49m\u001b[43mrequest\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmethod\u001b[49m\u001b[43m=\u001b[49m\u001b[43mmethod\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43murl\u001b[49m\u001b[43m=\u001b[49m\u001b[43murl\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n", "\u001b[36mFile \u001b[39m\u001b[32m~/miniforge3/envs/random/lib/python3.13/site-packages/requests/sessions.py:589\u001b[39m, in \u001b[36mSession.request\u001b[39m\u001b[34m(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json)\u001b[39m\n\u001b[32m 584\u001b[39m send_kwargs = {\n\u001b[32m 585\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mtimeout\u001b[39m\u001b[33m\"\u001b[39m: timeout,\n\u001b[32m 586\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mallow_redirects\u001b[39m\u001b[33m\"\u001b[39m: allow_redirects,\n\u001b[32m 587\u001b[39m }\n\u001b[32m 588\u001b[39m send_kwargs.update(settings)\n\u001b[32m--> \u001b[39m\u001b[32m589\u001b[39m resp = \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43msend\u001b[49m\u001b[43m(\u001b[49m\u001b[43mprep\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43msend_kwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 591\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m resp\n", "\u001b[36mFile \u001b[39m\u001b[32m~/miniforge3/envs/random/lib/python3.13/site-packages/requests/sessions.py:703\u001b[39m, in \u001b[36mSession.send\u001b[39m\u001b[34m(self, request, **kwargs)\u001b[39m\n\u001b[32m 700\u001b[39m start = preferred_clock()\n\u001b[32m 702\u001b[39m \u001b[38;5;66;03m# Send the request\u001b[39;00m\n\u001b[32m--> \u001b[39m\u001b[32m703\u001b[39m r = \u001b[43madapter\u001b[49m\u001b[43m.\u001b[49m\u001b[43msend\u001b[49m\u001b[43m(\u001b[49m\u001b[43mrequest\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 705\u001b[39m \u001b[38;5;66;03m# Total elapsed time of the request (approximately)\u001b[39;00m\n\u001b[32m 706\u001b[39m elapsed = preferred_clock() - start\n", 
"\u001b[36mFile \u001b[39m\u001b[32m~/miniforge3/envs/random/lib/python3.13/site-packages/requests/adapters.py:667\u001b[39m, in \u001b[36mHTTPAdapter.send\u001b[39m\u001b[34m(self, request, stream, timeout, verify, cert, proxies)\u001b[39m\n\u001b[32m 664\u001b[39m timeout = TimeoutSauce(connect=timeout, read=timeout)\n\u001b[32m 666\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m--> \u001b[39m\u001b[32m667\u001b[39m resp = \u001b[43mconn\u001b[49m\u001b[43m.\u001b[49m\u001b[43murlopen\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 668\u001b[39m \u001b[43m \u001b[49m\u001b[43mmethod\u001b[49m\u001b[43m=\u001b[49m\u001b[43mrequest\u001b[49m\u001b[43m.\u001b[49m\u001b[43mmethod\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 669\u001b[39m \u001b[43m \u001b[49m\u001b[43murl\u001b[49m\u001b[43m=\u001b[49m\u001b[43murl\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 670\u001b[39m \u001b[43m \u001b[49m\u001b[43mbody\u001b[49m\u001b[43m=\u001b[49m\u001b[43mrequest\u001b[49m\u001b[43m.\u001b[49m\u001b[43mbody\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 671\u001b[39m \u001b[43m \u001b[49m\u001b[43mheaders\u001b[49m\u001b[43m=\u001b[49m\u001b[43mrequest\u001b[49m\u001b[43m.\u001b[49m\u001b[43mheaders\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 672\u001b[39m \u001b[43m \u001b[49m\u001b[43mredirect\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[32m 673\u001b[39m \u001b[43m \u001b[49m\u001b[43massert_same_host\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[32m 674\u001b[39m \u001b[43m \u001b[49m\u001b[43mpreload_content\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[32m 675\u001b[39m \u001b[43m \u001b[49m\u001b[43mdecode_content\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[32m 676\u001b[39m \u001b[43m \u001b[49m\u001b[43mretries\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mmax_retries\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 677\u001b[39m \u001b[43m \u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m=\u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 678\u001b[39m \u001b[43m \u001b[49m\u001b[43mchunked\u001b[49m\u001b[43m=\u001b[49m\u001b[43mchunked\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 679\u001b[39m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 681\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m (ProtocolError, \u001b[38;5;167;01mOSError\u001b[39;00m) \u001b[38;5;28;01mas\u001b[39;00m err:\n\u001b[32m 682\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mConnectionError\u001b[39;00m(err, request=request)\n", "\u001b[36mFile \u001b[39m\u001b[32m~/miniforge3/envs/random/lib/python3.13/site-packages/urllib3/connectionpool.py:787\u001b[39m, in \u001b[36mHTTPConnectionPool.urlopen\u001b[39m\u001b[34m(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, preload_content, decode_content, **response_kw)\u001b[39m\n\u001b[32m 784\u001b[39m response_conn = conn \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m release_conn \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[32m 786\u001b[39m \u001b[38;5;66;03m# Make the request on the HTTPConnection object\u001b[39;00m\n\u001b[32m--> \u001b[39m\u001b[32m787\u001b[39m response = 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43m_make_request\u001b[49m\u001b[43m(\u001b[49m\n\u001b[32m 788\u001b[39m \u001b[43m \u001b[49m\u001b[43mconn\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 789\u001b[39m \u001b[43m \u001b[49m\u001b[43mmethod\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 790\u001b[39m \u001b[43m \u001b[49m\u001b[43murl\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 791\u001b[39m \u001b[43m \u001b[49m\u001b[43mtimeout\u001b[49m\u001b[43m=\u001b[49m\u001b[43mtimeout_obj\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 792\u001b[39m \u001b[43m \u001b[49m\u001b[43mbody\u001b[49m\u001b[43m=\u001b[49m\u001b[43mbody\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 793\u001b[39m \u001b[43m \u001b[49m\u001b[43mheaders\u001b[49m\u001b[43m=\u001b[49m\u001b[43mheaders\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 794\u001b[39m \u001b[43m \u001b[49m\u001b[43mchunked\u001b[49m\u001b[43m=\u001b[49m\u001b[43mchunked\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 795\u001b[39m \u001b[43m \u001b[49m\u001b[43mretries\u001b[49m\u001b[43m=\u001b[49m\u001b[43mretries\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 796\u001b[39m \u001b[43m \u001b[49m\u001b[43mresponse_conn\u001b[49m\u001b[43m=\u001b[49m\u001b[43mresponse_conn\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 797\u001b[39m \u001b[43m \u001b[49m\u001b[43mpreload_content\u001b[49m\u001b[43m=\u001b[49m\u001b[43mpreload_content\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 798\u001b[39m \u001b[43m \u001b[49m\u001b[43mdecode_content\u001b[49m\u001b[43m=\u001b[49m\u001b[43mdecode_content\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 799\u001b[39m \u001b[43m \u001b[49m\u001b[43m*\u001b[49m\u001b[43m*\u001b[49m\u001b[43mresponse_kw\u001b[49m\u001b[43m,\u001b[49m\n\u001b[32m 800\u001b[39m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 802\u001b[39m \u001b[38;5;66;03m# Everything went great!\u001b[39;00m\n\u001b[32m 803\u001b[39m clean_exit = \u001b[38;5;28;01mTrue\u001b[39;00m\n", "\u001b[36mFile \u001b[39m\u001b[32m~/miniforge3/envs/random/lib/python3.13/site-packages/urllib3/connectionpool.py:534\u001b[39m, in \u001b[36mHTTPConnectionPool._make_request\u001b[39m\u001b[34m(self, conn, method, url, body, headers, retries, timeout, chunked, response_conn, preload_content, decode_content, enforce_content_length)\u001b[39m\n\u001b[32m 532\u001b[39m \u001b[38;5;66;03m# Receive the response from the server\u001b[39;00m\n\u001b[32m 533\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m--> \u001b[39m\u001b[32m534\u001b[39m response = \u001b[43mconn\u001b[49m\u001b[43m.\u001b[49m\u001b[43mgetresponse\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 535\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m (BaseSSLError, \u001b[38;5;167;01mOSError\u001b[39;00m) \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[32m 536\u001b[39m \u001b[38;5;28mself\u001b[39m._raise_timeout(err=e, url=url, timeout_value=read_timeout)\n", "\u001b[36mFile \u001b[39m\u001b[32m~/miniforge3/envs/random/lib/python3.13/site-packages/urllib3/connection.py:516\u001b[39m, in \u001b[36mHTTPConnection.getresponse\u001b[39m\u001b[34m(self)\u001b[39m\n\u001b[32m 513\u001b[39m _shutdown = \u001b[38;5;28mgetattr\u001b[39m(\u001b[38;5;28mself\u001b[39m.sock, \u001b[33m\"\u001b[39m\u001b[33mshutdown\u001b[39m\u001b[33m\"\u001b[39m, \u001b[38;5;28;01mNone\u001b[39;00m)\n\u001b[32m 515\u001b[39m \u001b[38;5;66;03m# Get the response from http.client.HTTPConnection\u001b[39;00m\n\u001b[32m--> \u001b[39m\u001b[32m516\u001b[39m httplib_response = 
\u001b[38;5;28;43msuper\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m.\u001b[49m\u001b[43mgetresponse\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 518\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m 519\u001b[39m assert_header_parsing(httplib_response.msg)\n", "\u001b[36mFile \u001b[39m\u001b[32m~/miniforge3/envs/random/lib/python3.13/http/client.py:1430\u001b[39m, in \u001b[36mHTTPConnection.getresponse\u001b[39m\u001b[34m(self)\u001b[39m\n\u001b[32m 1428\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m 1429\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m-> \u001b[39m\u001b[32m1430\u001b[39m \u001b[43mresponse\u001b[49m\u001b[43m.\u001b[49m\u001b[43mbegin\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1431\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mConnectionError\u001b[39;00m:\n\u001b[32m 1432\u001b[39m \u001b[38;5;28mself\u001b[39m.close()\n", "\u001b[36mFile \u001b[39m\u001b[32m~/miniforge3/envs/random/lib/python3.13/http/client.py:331\u001b[39m, in \u001b[36mHTTPResponse.begin\u001b[39m\u001b[34m(self)\u001b[39m\n\u001b[32m 329\u001b[39m \u001b[38;5;66;03m# read until we get a non-100 response\u001b[39;00m\n\u001b[32m 330\u001b[39m \u001b[38;5;28;01mwhile\u001b[39;00m \u001b[38;5;28;01mTrue\u001b[39;00m:\n\u001b[32m--> \u001b[39m\u001b[32m331\u001b[39m version, status, reason = \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43m_read_status\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 332\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m status != CONTINUE:\n\u001b[32m 333\u001b[39m \u001b[38;5;28;01mbreak\u001b[39;00m\n", "\u001b[36mFile \u001b[39m\u001b[32m~/miniforge3/envs/random/lib/python3.13/http/client.py:292\u001b[39m, in \u001b[36mHTTPResponse._read_status\u001b[39m\u001b[34m(self)\u001b[39m\n\u001b[32m 291\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34m_read_status\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[32m--> \u001b[39m\u001b[32m292\u001b[39m line = \u001b[38;5;28mstr\u001b[39m(\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mfp\u001b[49m\u001b[43m.\u001b[49m\u001b[43mreadline\u001b[49m\u001b[43m(\u001b[49m\u001b[43m_MAXLINE\u001b[49m\u001b[43m \u001b[49m\u001b[43m+\u001b[49m\u001b[43m \u001b[49m\u001b[32;43m1\u001b[39;49m\u001b[43m)\u001b[49m, \u001b[33m\"\u001b[39m\u001b[33miso-8859-1\u001b[39m\u001b[33m\"\u001b[39m)\n\u001b[32m 293\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mlen\u001b[39m(line) > _MAXLINE:\n\u001b[32m 294\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m LineTooLong(\u001b[33m\"\u001b[39m\u001b[33mstatus line\u001b[39m\u001b[33m\"\u001b[39m)\n", "\u001b[36mFile \u001b[39m\u001b[32m~/miniforge3/envs/random/lib/python3.13/socket.py:719\u001b[39m, in \u001b[36mSocketIO.readinto\u001b[39m\u001b[34m(self, b)\u001b[39m\n\u001b[32m 717\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mOSError\u001b[39;00m(\u001b[33m\"\u001b[39m\u001b[33mcannot read from timed out object\u001b[39m\u001b[33m\"\u001b[39m)\n\u001b[32m 718\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m--> \u001b[39m\u001b[32m719\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43m_sock\u001b[49m\u001b[43m.\u001b[49m\u001b[43mrecv_into\u001b[49m\u001b[43m(\u001b[49m\u001b[43mb\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 720\u001b[39m \u001b[38;5;28;01mexcept\u001b[39;00m timeout:\n\u001b[32m 721\u001b[39m 
\u001b[38;5;28mself\u001b[39m._timeout_occurred = \u001b[38;5;28;01mTrue\u001b[39;00m\n", "\u001b[36mFile \u001b[39m\u001b[32m~/miniforge3/envs/random/lib/python3.13/ssl.py:1304\u001b[39m, in \u001b[36mSSLSocket.recv_into\u001b[39m\u001b[34m(self, buffer, nbytes, flags)\u001b[39m\n\u001b[32m 1300\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m flags != \u001b[32m0\u001b[39m:\n\u001b[32m 1301\u001b[39m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\n\u001b[32m 1302\u001b[39m \u001b[33m\"\u001b[39m\u001b[33mnon-zero flags not allowed in calls to recv_into() on \u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[33m\"\u001b[39m %\n\u001b[32m 1303\u001b[39m \u001b[38;5;28mself\u001b[39m.\u001b[34m__class__\u001b[39m)\n\u001b[32m-> \u001b[39m\u001b[32m1304\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43mread\u001b[49m\u001b[43m(\u001b[49m\u001b[43mnbytes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mbuffer\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1305\u001b[39m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[32m 1306\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28msuper\u001b[39m().recv_into(buffer, nbytes, flags)\n", "\u001b[36mFile \u001b[39m\u001b[32m~/miniforge3/envs/random/lib/python3.13/ssl.py:1138\u001b[39m, in \u001b[36mSSLSocket.read\u001b[39m\u001b[34m(self, len, buffer)\u001b[39m\n\u001b[32m 1136\u001b[39m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m 1137\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m buffer \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[32m-> \u001b[39m\u001b[32m1138\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m.\u001b[49m\u001b[43m_sslobj\u001b[49m\u001b[43m.\u001b[49m\u001b[43mread\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mlen\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mbuffer\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m 1139\u001b[39m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[32m 1140\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m._sslobj.read(\u001b[38;5;28mlen\u001b[39m)\n", "\u001b[31mKeyboardInterrupt\u001b[39m: " ] } ], "source": [ "# Minimal scan: kits 1001..1060 β€” find sensor with most datapoints\n", "\n", "def find_max_sensor_in_range(start_kit: int = 1015, end_kit: int = 1060, page_size: int = 50) -> dict:\n", " best = {\"kit_id\": None, \"sensor\": None, \"count\": -1}\n", " for kit_id in range(start_kit, end_kit + 1):\n", " kit = get_kit_info(kit_id)\n", " if not kit or not isinstance(kit, dict):\n", " print(f\"kit {kit_id}: not found\")\n", " continue\n", " sensors = kit.get(\"sensors\") or []\n", " if not sensors:\n", " print(f\"kit {kit_id}: no sensors\")\n", " continue\n", " for s in sensors:\n", " name = s.get(\"name\")\n", " if not name:\n", " continue\n", " cnt = count_sensor_measurements(kit_id, name, page_size=page_size)\n", " print(f\"kit {kit_id} sensor {name}: {cnt}\")\n", " if cnt > best[\"count\"]:\n", " best = {\"kit_id\": kit_id, \"sensor\": name, \"count\": cnt}\n", " return best\n", "\n", "best = find_max_sensor_in_range(1001, 1060, page_size=50)\n", "print(\"\\nRESULT\")\n", "print(best)" ] }, { "cell_type": "code", "execution_count": 18, "id": "3216b9fb", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "πŸ”Ž Scanning sensors from 1001 to 1060...\n", "Sensor 1001: 0 datapoints (via /sensors/1001)\n", 
"Sensor 1002: 0 datapoints (via /sensors/1002)\n", "Sensor 1003: 0 datapoints (via /sensors/1003)\n", "Sensor 1004: 0 datapoints (via /sensors/1004)\n", "Sensor 1005: 0 datapoints (via /sensors/1005)\n", "Sensor 1006: 0 datapoints (via /sensors/1006)\n", "Sensor 1007: 0 datapoints (via /sensors/1007)\n", "Sensor 1008: 0 datapoints (via /sensors/1008)\n", "Sensor 1009: 0 datapoints (via /sensors/1009)\n", "Sensor 1010: 0 datapoints (via /sensors/1010)\n", "Sensor 1011: 0 datapoints (via /sensors/1011)\n", "Sensor 1012: 0 datapoints (via /sensors/1012)\n", "Sensor 1013: 0 datapoints (via /sensors/1013)\n", "Sensor 1014: 0 datapoints (via /sensors/1014)\n", "Sensor 1015: 0 datapoints (via /sensors/1015)\n", "Sensor 1016: 0 datapoints (via /sensors/1016)\n", "Sensor 1017: 0 datapoints (via /sensors/1017)\n", "Sensor 1018: 0 datapoints (via /sensors/1018)\n", "Sensor 1019: 0 datapoints (via /sensors/1019)\n", "Sensor 1020: 0 datapoints (via /sensors/1020)\n", "Sensor 1021: 0 datapoints (via /sensors/1021)\n", "Sensor 1022: 0 datapoints (via /sensors/1022)\n", "Sensor 1023: 0 datapoints (via /sensors/1023)\n", "Sensor 1024: 0 datapoints (via /sensors/1024)\n", "Sensor 1025: 0 datapoints (via /sensors/1025)\n", "Sensor 1026: 0 datapoints (via /sensors/1026)\n", "Sensor 1027: 0 datapoints (via /sensors/1027)\n", "Sensor 1028: 0 datapoints (via /sensors/1028)\n", "Sensor 1029: 0 datapoints (via /sensors/1029)\n", "Sensor 1030: 0 datapoints (via /sensors/1030)\n", "Sensor 1031: 0 datapoints (via /sensors/1031)\n", "Sensor 1032: 0 datapoints (via /sensors/1032)\n", "Sensor 1033: 0 datapoints (via /sensors/1033)\n", "Sensor 1034: 0 datapoints (via /sensors/1034)\n", "Sensor 1035: 0 datapoints (via /sensors/1035)\n", "Sensor 1036: 0 datapoints (via /sensors/1036)\n", "Sensor 1037: 0 datapoints (via /sensors/1037)\n", "Sensor 1038: 0 datapoints (via /sensors/1038)\n", "Sensor 1039: 0 datapoints (via /sensors/1039)\n", "Sensor 1040: 0 datapoints (via /sensors/1040)\n", "Sensor 1041: 0 datapoints (via /sensors/1041)\n", "Sensor 1042: 0 datapoints (via /sensors/1042)\n", "Sensor 1043: 0 datapoints (via /sensors/1043)\n", "Sensor 1044: 0 datapoints (via /sensors/1044)\n", "Sensor 1045: 0 datapoints (via /sensors/1045)\n", "Sensor 1046: 0 datapoints (via /sensors/1046)\n", "Sensor 1047: 0 datapoints (via /sensors/1047)\n", "Sensor 1048: 0 datapoints (via /sensors/1048)\n", "Sensor 1049: 0 datapoints (via /sensors/1049)\n", "Sensor 1050: 0 datapoints (via /sensors/1050)\n", "Sensor 1051: 0 datapoints (via /sensors/1051)\n", "Sensor 1052: 0 datapoints (via /sensors/1052)\n", "Sensor 1053: 0 datapoints (via /sensors/1053/readings)\n", "Sensor 1054: 0 datapoints (via /sensors/1054)\n", "Sensor 1055: 0 datapoints (via /sensors/1055)\n", "Sensor 1056: 0 datapoints (via /sensors/1056)\n", "Sensor 1057: 0 datapoints (via /sensors/1057)\n", "Sensor 1058: 0 datapoints (via /sensors/1058)\n", "Sensor 1059: 0 datapoints (via /sensors/1059)\n", "Sensor 1060: 0 datapoints (via /sensors/1060)\n", "\n", "🏁 Scan complete.\n", "πŸ† Sensor with most datapoints:\n", " πŸ†” ID: 1001\n", " πŸ“Š Count: 0\n", " πŸ”— Endpoint: /sensors/1001\n" ] } ], "source": [ "from collections import defaultdict\n", "\n", "def _count_datapoints_from_response(data) -> int:\n", " \"\"\"Best-effort count of datapoints from arbitrary API responses.\"\"\"\n", " if data is None:\n", " return 0\n", " if isinstance(data, list):\n", " return len(data)\n", " if isinstance(data, dict):\n", " # Prefer common array keys\n", " for key in [\n", " 'data', 
'results', 'measurements', 'readings', 'entries', 'values',\n", " 'observations', 'records', 'points'\n", " ]:\n", " if key in data and isinstance(data[key], list):\n", " return len(data[key])\n", " # Fallback: count scalar series\n", " return sum(1 for v in data.values() if isinstance(v, (int, float, str, bool)))\n", " return 0\n", "\n", "\n", "def fetch_sensor_datapoints(sensor_id: int) -> tuple[int, dict]:\n", " \"\"\"\n", " Try multiple likely endpoints for a sensor and return the datapoint count and last successful meta.\n", " Returns (count, meta) where meta contains endpoint and status.\n", " \"\"\"\n", " endpoints = [\n", " f\"/sensors/{sensor_id}\",\n", " f\"/sensors/{sensor_id}/data\",\n", " f\"/sensors/{sensor_id}/readings\",\n", " f\"/sensors/{sensor_id}/measurements\",\n", " f\"/devices/{sensor_id}/data\",\n", " f\"/nodes/{sensor_id}/uplinks\",\n", " ]\n", "\n", " last_error = None\n", " for ep in endpoints:\n", " url = f\"{BASE_URL.rstrip('/')}{ep}\"\n", " try:\n", " r = requests.get(url, headers=HEADERS, timeout=30)\n", " if r.status_code == 200:\n", " try:\n", " data = r.json()\n", " except Exception:\n", " data = None\n", " count = _count_datapoints_from_response(data)\n", " return count, {\"endpoint\": ep, \"status\": r.status_code}\n", " else:\n", " last_error = {\"endpoint\": ep, \"status\": r.status_code, \"text\": r.text[:200]}\n", " except requests.RequestException as e:\n", " last_error = {\"endpoint\": ep, \"error\": str(e)}\n", " continue\n", " return 0, (last_error or {\"endpoint\": None, \"error\": \"no-endpoint-succeeded\"})\n", "\n", "\n", "def scan_sensors_and_find_max(start_id: int = 1001, end_id: int = 1060):\n", " print(f\"πŸ”Ž Scanning sensors from {start_id} to {end_id}...\")\n", " best = {\n", " \"sensor_id\": None,\n", " \"count\": -1,\n", " \"meta\": {}\n", " }\n", " results = {}\n", "\n", " for sid in range(start_id, end_id + 1):\n", " count, meta = fetch_sensor_datapoints(sid)\n", " results[sid] = {\"count\": count, \"meta\": meta}\n", " print(f\"Sensor {sid}: {count} datapoints (via {meta.get('endpoint')})\")\n", " if count > best[\"count\"]:\n", " best = {\"sensor_id\": sid, \"count\": count, \"meta\": meta}\n", "\n", " print(\"\\n🏁 Scan complete.\")\n", " if best[\"sensor_id\"] is not None:\n", " print(\"πŸ† Sensor with most datapoints:\")\n", " print(f\" πŸ†” ID: {best['sensor_id']}\")\n", " print(f\" πŸ“Š Count: {best['count']}\")\n", " print(f\" πŸ”— Endpoint: {best['meta'].get('endpoint')}\")\n", " else:\n", " print(\"No sensors returned datapoints in the given range.\")\n", "\n", " return {\"best\": best, \"results\": results}\n", "\n", "# Run the scan now\n", "scan_result = scan_sensors_and_find_max(1001, 1060)" ] }, { "cell_type": "code", "execution_count": 16, "id": "46506887", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "⚠️ Run the board analysis first to see visualizations.\n", "πŸ’‘ Make sure to update the API_KEY and BASE_URL in the configuration section.\n" ] } ], "source": [ "def create_board_analysis_chart(board_stats: Dict[str, Dict]):\n", " \"\"\"\n", " Create visualizations for board data analysis.\n", " \n", " Args:\n", " board_stats (Dict[str, Dict]): Board statistics from get_board_data_counts\n", " \"\"\"\n", " if not board_stats:\n", " print(\"❌ No board statistics available for visualization.\")\n", " return\n", " \n", " # Prepare data for plotting\n", " board_names = [stats['name'] for stats in board_stats.values()]\n", " data_counts = [stats['data_count'] for stats in 
board_stats.values()]\n", " board_ids = list(board_stats.keys())\n", " \n", " # Create DataFrame for better handling\n", " df = pd.DataFrame({\n", " 'Board ID': board_ids,\n", " 'Board Name': board_names,\n", " 'Data Points': data_counts\n", " })\n", " \n", " # Sort by data points for better visualization\n", " df = df.sort_values('Data Points', ascending=True)\n", " \n", " # Create the plot\n", " plt.figure(figsize=(12, 8))\n", " \n", " # Horizontal bar chart\n", " bars = plt.barh(range(len(df)), df['Data Points'], color='skyblue', alpha=0.7)\n", " \n", " # Customize the plot\n", " plt.yticks(range(len(df)), df['Board Name'])\n", " plt.xlabel('Number of Data Points')\n", " plt.title('Data Points per Board - Teleagriculture API Analysis', fontsize=16, fontweight='bold')\n", " plt.grid(axis='x', alpha=0.3)\n", " \n", " # Add value labels on bars\n", " for i, (bar, value) in enumerate(zip(bars, df['Data Points'])):\n", " plt.text(value + max(df['Data Points']) * 0.01, bar.get_y() + bar.get_height()/2, \n", " str(value), va='center', fontweight='bold')\n", " \n", " # Highlight the board with most data points\n", " max_idx = df['Data Points'].idxmax()\n", " bars[df.index.get_loc(max_idx)].set_color('gold')\n", " bars[df.index.get_loc(max_idx)].set_alpha(1.0)\n", " \n", " plt.tight_layout()\n", " plt.show()\n", " \n", " # Print detailed statistics\n", " print(\"πŸ“Š DETAILED STATISTICS\")\n", " print(\"=\" * 50)\n", " print(f\"Total boards analyzed: {len(df)}\")\n", " print(f\"Total data points across all boards: {df['Data Points'].sum()}\")\n", " print(f\"Average data points per board: {df['Data Points'].mean():.1f}\")\n", " print(f\"Median data points per board: {df['Data Points'].median():.1f}\")\n", " print(f\"Standard deviation: {df['Data Points'].std():.1f}\")\n", " print()\n", " \n", " # Show top 3 boards\n", " top_3 = df.nlargest(3, 'Data Points')\n", " print(\"πŸ† TOP 3 BOARDS:\")\n", " for i, (_, row) in enumerate(top_3.iterrows(), 1):\n", " emoji = \"πŸ₯‡\" if i == 1 else \"πŸ₯ˆ\" if i == 2 else \"πŸ₯‰\"\n", " print(f\"{emoji} {row['Board Name']}: {row['Data Points']} data points\")\n", " \n", " return df\n", "\n", "# Create visualization if we have results\n", "if 'result' in locals() and result and result.get('all_stats'):\n", " print(\"πŸ“ˆ Creating visualization...\")\n", " df_analysis = create_board_analysis_chart(result['all_stats'])\n", "else:\n", " print(\"⚠️ Run the board analysis first to see visualizations.\")\n", " print(\"πŸ’‘ Make sure to update the API_KEY and BASE_URL in the configuration section.\")" ] }, { "cell_type": "code", "execution_count": null, "id": "e01c5bf6", "metadata": {}, "outputs": [], "source": [] }, { "cell_type": "markdown", "id": "3553a610", "metadata": {}, "source": [ "## Simple helper: get all sensor data for a kit id\n", "\n", "This function fetches all available measurements for a given kit (board) id across all its sensors and returns a tidy pandas DataFrame. 
It uses the same BASE_URL and HEADERS configured above and follows the API's cursor pagination automatically.\n", "\n", "- Input: kit_id (int)\n", "- Optional: sensors (list[str]) to limit which sensors to fetch; defaults to all sensors on the kit\n", "- Output: pandas DataFrame with columns like: kit_id, sensor, timestamp/value/..., depending on the API payload" ] }, { "cell_type": "code", "execution_count": null, "id": "051f0aab", "metadata": {}, "outputs": [], "source": [ "from typing import Iterable, Any\n", "\n", "def _paginate(url: str, params: dict | None = None, headers: dict | None = None, page_size: int = 100, max_pages: int = 500):\n", " \"\"\"Generator yielding pages from cursor-paginated endpoint returning {'data': [...], 'meta': {'next_cursor': '...'}}\"\"\"\n", " params = dict(params or {})\n", " params[\"page[size]\"] = str(page_size)\n", " cursor = None\n", " pages = 0\n", " while pages < max_pages:\n", " if cursor:\n", " params[\"page[cursor]\"] = cursor\n", " try:\n", " r = requests.get(url, headers=headers, params=params, timeout=30)\n", " except requests.RequestException:\n", " break\n", " if r.status_code != 200:\n", " break\n", " try:\n", " payload = r.json()\n", " except Exception:\n", " break\n", " data = payload.get(\"data\")\n", " meta = payload.get(\"meta\", {})\n", " yield data if isinstance(data, list) else []\n", " cursor = meta.get(\"next_cursor\")\n", " pages += 1\n", " if not cursor:\n", " break\n", "\n", "\n", "def get_kit_measurements_df(kit_id: int, sensors: Iterable[str] | None = None, page_size: int = 100) -> pd.DataFrame:\n", " \"\"\"\n", " Fetch all measurements for a given kit across selected sensors and return a tidy DataFrame.\n", "\n", " - kit_id: numeric id of the kit/board\n", " - sensors: optional list of sensor names; if None, will discover sensors via get_kit_info(kit_id)\n", " - page_size: page size for cursor pagination\n", "\n", " Returns a DataFrame with columns: kit_id, sensor, timestamp, value, unit, _raw\n", " (Columns may include NaNs if the API doesn't provide those fields.)\n", " \"\"\"\n", " # Discover sensors if not provided\n", " sensor_list: list[str]\n", " if sensors is None:\n", " kit = get_kit_info(kit_id)\n", " if not kit:\n", " return pd.DataFrame(columns=[\"kit_id\", \"sensor\", \"timestamp\", \"value\", \"unit\", \"_raw\"])\n", " sensor_list = [s.get(\"name\") for s in (kit.get(\"sensors\") or []) if isinstance(s, dict) and s.get(\"name\")]\n", " else:\n", " sensor_list = [s for s in sensors if s]\n", "\n", " rows: list[dict[str, Any]] = []\n", "\n", " for sname in sensor_list:\n", " base = f\"{BASE_URL}/kits/{kit_id}/{sname}/measurements\"\n", " for page in _paginate(base, headers=HEADERS, page_size=page_size):\n", " for item in page:\n", " if not isinstance(item, dict):\n", " continue\n", " rec = item\n", " # Some APIs wrap fields inside 'attributes'\n", " if isinstance(rec.get(\"attributes\"), dict):\n", " # merge attributes shallowly (attributes wins for overlapping keys)\n", " rec = {**{k: v for k, v in rec.items() if k != \"attributes\"}, **rec[\"attributes\"]}\n", " # Normalize common fields\n", " ts = rec.get(\"timestamp\") or rec.get(\"time\") or rec.get(\"created_at\") or rec.get(\"datetime\")\n", " val = rec.get(\"value\") or rec.get(\"reading\") or rec.get(\"measurement\") or rec.get(\"val\")\n", " unit = rec.get(\"unit\") or rec.get(\"units\")\n", " rows.append({\n", " \"kit_id\": kit_id,\n", " \"sensor\": sname,\n", " \"timestamp\": ts,\n", " \"value\": val,\n", " \"unit\": unit,\n", " \"_raw\": item, # 
keep original\n", " })\n", "\n", " df = pd.DataFrame(rows)\n", " # Coerce timestamp and sort\n", " if not df.empty and \"timestamp\" in df.columns:\n", " try:\n", " df[\"timestamp\"] = pd.to_datetime(df[\"timestamp\"], errors=\"coerce\", utc=True)\n", " df = df.sort_values([\"sensor\", \"timestamp\"], kind=\"stable\")\n", " except Exception:\n", " pass\n", " return df" ] }, { "cell_type": "code", "execution_count": null, "id": "e5f429f9", "metadata": {}, "outputs": [], "source": [ "# Demo: fetch all data for a kit id (adjust kit_id)\n", "KIT_DEMO_ID = 1001 # change as needed\n", "\n", "df_all = get_kit_measurements_df(KIT_DEMO_ID)\n", "print(f\"Fetched {len(df_all)} rows for kit {KIT_DEMO_ID}\")\n", "df_all.head()" ] }, { "cell_type": "code", "execution_count": null, "id": "61c9be14", "metadata": {}, "outputs": [], "source": [ "# Simplest helper: get a DataFrame for a kit id\n", "\n", "def get_kit_df(kit_id: int) -> pd.DataFrame:\n", " return get_kit_measurements_df(kit_id)\n", "\n", "# Example usage:\n", "# df = get_kit_df(1001)\n", "# df.head()" ] } ], "metadata": { "kernelspec": { "display_name": "random", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.13.3" } }, "nbformat": 4, "nbformat_minor": 5 }