| """ | |
| Environment setup and dependency checking utilities | |
| """ | |
| import os | |
| import sys | |
| import subprocess | |
| import torch | |
| import gc | |
| import warnings | |
| import logging | |
| warnings.filterwarnings('ignore') | |
| logging.basicConfig(level=logging.INFO) | |
| def setup_environment(): | |
| """Setup the environment for optimal performance""" | |
| print("π§ Setting up environment...") | |
| # Setup Hugging Face authentication | |
| print("π Setting up Hugging Face authentication...") | |
| try: | |
| from google.colab import userdata | |
| HF_TOKEN = userdata.get('HF_TOKEN') | |
| if HF_TOKEN: | |
| print("β Token loaded from Colab secrets") | |
| from huggingface_hub import login | |
| login(token=HF_TOKEN) | |
| print("β Hugging Face authentication successful!") | |
| else: | |
| print("β οΈ No HF_TOKEN found in secrets - using public models only") | |
| print("π‘ Add HF_TOKEN to secrets for access to more models") | |
| except Exception as e: | |
| print(f"β οΈ Authentication skipped: {str(e)}") | |
| print("π Continuing with public models...") | |
| # Setup GPU if available | |
| if torch.cuda.is_available(): | |
| torch.cuda.empty_cache() | |
| gc.collect() | |
| print(f"π§ GPU Memory cleared and ready") | |
| print(f"π― GPU Available: {torch.cuda.get_device_name(0)}") | |
| else: | |
| print("β οΈ No GPU available, using CPU (will be slower)") | |
| print("β Environment setup complete!") | |
| def check_dependencies(): | |
| """Check if all required dependencies are available""" | |
| required_packages = [ | |
| 'torch', 'transformers', 'huggingface_hub', | |
| 'gradio', 'sympy', 'numpy' | |
| ] | |
| missing_packages = [] | |
| for package in required_packages: | |
| try: | |
| __import__(package) | |
| except ImportError: | |
| missing_packages.append(package) | |
| if missing_packages: | |
| print(f"β Missing packages: {', '.join(missing_packages)}") | |
| return False | |
| print("β All dependencies available") | |
| return True | |
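

# Minimal usage sketch, assuming this module is also meant to be run directly
# as a script: verify dependencies first, then configure the environment.
# The __main__ guard and the pip hint below are illustrative assumptions,
# not a documented entry point of this Space.
if __name__ == "__main__":
    if check_dependencies():
        setup_environment()
    else:
        print("Install the missing packages (e.g. `pip install torch transformers "
              "huggingface_hub gradio sympy numpy`) and rerun.")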