import asyncio
import json
import logging
from datetime import datetime, timedelta
from typing import List

import dropbox

from modules.dropbox.client import dbx
from modules.video.model import VideoMetadata

# Initialize logger
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

# Dropbox client (already configured)
# dbx = dropbox.Dropbox(app_key=APP_KEY, app_secret=APP_SECRET, oauth2_refresh_token=REFRESH_TOKEN)

# Cache dictionary
# Key: scripture_name, Value: {"timestamp": datetime, "data": List[VideoMetadata]}
_video_cache: dict[str, dict] = {}
CACHE_TTL = timedelta(hours=1)  # Cache time-to-live
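
# Illustrative cache entry shape (hypothetical values, for reference only):
#   _video_cache["example_scripture"] = {
#       "timestamp": datetime(2024, 1, 1, 12, 0, 0),
#       "data": [VideoMetadata(...), ...],
#   }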


async def fetch_video_urls_from_dropbox(scripture_name: str) -> list[VideoMetadata]:
    """
    Fetch video metadata JSON from Dropbox with caching.

    :param scripture_name: Name of the scripture folder in Dropbox
    :return: List of VideoMetadata objects
    """
    loop = asyncio.get_running_loop()

    # Check if we have cached data that is still fresh
    cache_entry = _video_cache.get(scripture_name)
    if cache_entry:
        age = datetime.now() - cache_entry["timestamp"]
        if age < CACHE_TTL:
            logger.info(f"Using cached video metadata for '{scripture_name}' (age: {age})")
            return cache_entry["data"]

    logger.info(f"Fetching video metadata for '{scripture_name}' from Dropbox")
    try:
        # Dropbox SDK is synchronous, run in executor
        metadata, res = await loop.run_in_executor(
            None, dbx.files_download, f"/{scripture_name}/video_metadata.json"
        )
        data = res.content.decode("utf-8")
        logger.info("video data = %s", data)
        json_list = json.loads(data)

        # Convert each JSON item to VideoMetadata
        video_data = [VideoMetadata(**item) for item in json_list]

        # Update cache
        _video_cache[scripture_name] = {
            "timestamp": datetime.now(),
            "data": video_data
        }
        logger.info(f"Cached {len(video_data)} videos for '{scripture_name}'")
        return video_data
    except Exception as e:
        logger.error(f"Error fetching video metadata for '{scripture_name}' from Dropbox", exc_info=e)
        # Fall back to cached data if available
        if cache_entry:
            logger.warning(f"Returning stale cached data for '{scripture_name}' due to Dropbox error")
            return cache_entry["data"]
        else:
            logger.warning(f"No cached data available for '{scripture_name}', returning empty list")
            return []
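

# Example usage (illustrative sketch only; assumes an existing asyncio event loop, e.g. an
# async web handler, and "bhagavad_gita" is a hypothetical Dropbox folder name):
#
#     videos = await fetch_video_urls_from_dropbox("bhagavad_gita")
#     for video in videos:
#         ...  # use each VideoMetadata entry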