"""
Environment setup and dependency checking utilities
"""
import os
import sys
import subprocess
import torch
import gc
import warnings
import logging
warnings.filterwarnings('ignore')
logging.basicConfig(level=logging.INFO)


def setup_environment():
    """Set up the environment for optimal performance."""
    print("🔧 Setting up environment...")

    # Set up Hugging Face authentication
    print("🔐 Setting up Hugging Face authentication...")
    try:
        from google.colab import userdata
        HF_TOKEN = userdata.get('HF_TOKEN')
        if HF_TOKEN:
            print("✅ Token loaded from Colab secrets")
            from huggingface_hub import login
            login(token=HF_TOKEN)
            print("✅ Hugging Face authentication successful!")
        else:
            print("⚠️ No HF_TOKEN found in secrets - using public models only")
            print("💡 Add HF_TOKEN to secrets for access to more models")
    except Exception as e:
        print(f"⚠️ Authentication skipped: {str(e)}")
        print("🔄 Continuing with public models...")

    # Clear GPU memory if a GPU is available
    if torch.cuda.is_available():
        torch.cuda.empty_cache()
        gc.collect()
        print("🔧 GPU Memory cleared and ready")
        print(f"🎯 GPU Available: {torch.cuda.get_device_name(0)}")
    else:
        print("⚠️ No GPU available, using CPU (will be slower)")

    print("✅ Environment setup complete!")


def check_dependencies():
    """Check if all required dependencies are available"""
    required_packages = [
        'torch', 'transformers', 'huggingface_hub',
        'gradio', 'sympy', 'numpy'
    ]
    missing_packages = []
    for package in required_packages:
        try:
            __import__(package)
        except ImportError:
            missing_packages.append(package)

    if missing_packages:
        print(f"❌ Missing packages: {', '.join(missing_packages)}")
        return False

    print("✅ All dependencies available")
    return True
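

# Example usage: a minimal sketch, assuming this module may also be run
# directly (for example from a Colab cell). The non-zero exit on missing
# packages is an assumption about how callers want failures reported.
if __name__ == "__main__":
    setup_environment()
    if not check_dependencies():
        # Missing packages were already listed above; exit non-zero so a
        # wrapping script or notebook cell can detect the failure.
        sys.exit(1)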