#!/usr/bin/env python3
"""
Download a sample GGUF model for testing llama.cpp integration
"""
import os

from huggingface_hub import hf_hub_download

from config import get_recommended_model, MODEL_DOWNLOAD_CONFIG
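# NOTE: config.py is not shown alongside this script. It is only assumed to
# provide a MODEL_DOWNLOAD_CONFIG dict and a get_recommended_model() helper
# shaped roughly like the sketch below; the model, paths, and values here are
# illustrative placeholders, not the actual contents of config.py:
#
#   MODEL_DOWNLOAD_CONFIG = {
#       "cache_dir": "./models",      # where downloaded GGUF files are stored
#       "resume_download": True,      # resume partially downloaded files
#       "use_auth_token": None,       # HF token, only needed for gated repos
#   }
#
#   def get_recommended_model():
#       return {
#           "name": "TinyLlama 1.1B Chat (Q4_K_M)",
#           "repo_id": "TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF",
#           "filename": "tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf",
#           "size": "~670 MB",
#           "description": "Small chat model for quick llama.cpp smoke tests",
#       }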
def download_sample_model():
    """Download a recommended small model for testing"""
    model_info = get_recommended_model()

    print(f"📥 Downloading {model_info['name']}...")
    print(f"   Repository: {model_info['repo_id']}")
    print(f"   File: {model_info['filename']}")
    print(f"   Size: {model_info['size']}")
    print(f"   Description: {model_info['description']}")

    try:
        # Create the models directory if it doesn't exist
        os.makedirs(MODEL_DOWNLOAD_CONFIG['cache_dir'], exist_ok=True)

        # Download the model from the Hugging Face Hub
        # (recent huggingface_hub releases resume interrupted downloads by
        # default, so resume_download mainly matters on older versions)
        model_path = hf_hub_download(
            repo_id=model_info['repo_id'],
            filename=model_info['filename'],
            cache_dir=MODEL_DOWNLOAD_CONFIG['cache_dir'],
            resume_download=MODEL_DOWNLOAD_CONFIG['resume_download'],
            token=MODEL_DOWNLOAD_CONFIG['use_auth_token']
        )

        print("✅ Model downloaded successfully!")
        print(f"   Path: {model_path}")

        # Create a symlink in the models directory for easy access.
        # lexists() also catches a broken symlink left by a previous run.
        symlink_path = os.path.join(MODEL_DOWNLOAD_CONFIG['cache_dir'], "model.gguf")
        if os.path.lexists(symlink_path):
            os.remove(symlink_path)
        try:
            os.symlink(model_path, symlink_path)
            print(f"   Symlink created: {symlink_path}")
        except OSError:
            # Symlinks might not work on all systems, so just report the path
            print(f"   Use this path in your code: {model_path}")

        return model_path

    except Exception as e:
        print(f"❌ Error downloading model: {e}")
        print("💡 You can manually download a GGUF model and place it in ./models/")
        return None
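# (Alternative, shown only for illustration: the same file can also be fetched
#  from a shell with the CLI that ships with huggingface_hub, along the lines of
#  `huggingface-cli download <repo_id> <filename> --local-dir ./models`.)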
def list_available_models():
    """List models available in the models directory"""
    models_dir = MODEL_DOWNLOAD_CONFIG['cache_dir']

    if not os.path.exists(models_dir):
        print(f"📁 Models directory doesn't exist: {models_dir}")
        return []

    model_files = []
    for file in os.listdir(models_dir):
        if file.endswith(('.gguf', '.ggml')):
            file_path = os.path.join(models_dir, file)
            file_size = os.path.getsize(file_path)
            model_files.append({
                'name': file,
                'path': file_path,
                'size_mb': file_size / (1024 * 1024)
            })

    if model_files:
        print("📋 Available models:")
        for model in model_files:
            print(f"   - {model['name']} ({model['size_mb']:.1f} MB)")
    else:
        print("📋 No GGUF/GGML models found in models directory")

    return model_files
if __name__ == "__main__":
    print("🤖 Model Download Utility for llama.cpp")
    print("=" * 50)

    # List existing models
    print("\n🔍 Checking for existing models...")
    existing_models = list_available_models()

    if not existing_models:
        print("\n📥 No models found. Downloading sample model...")
        download_sample_model()
    else:
        print(f"\n✅ Found {len(existing_models)} existing model(s)")

        # Ask if the user wants to download another model
        print("\n❓ Download sample model anyway? (y/n): ", end="")
        try:
            response = input().lower().strip()
            if response in ['y', 'yes']:
                download_sample_model()
            else:
                print("👍 Using existing models")
        except (EOFError, KeyboardInterrupt):
            print("\n👍 Using existing models")