rdune71 committed
Commit da339ca · 1 Parent(s): 6015c25

Add Ollama connectivity diagnostic script and update README

Files changed (2)
  1. README.md +11 -1
  2. diagnose_ollama.py +79 -0
README.md CHANGED
@@ -24,6 +24,16 @@ A personal development assistant powered by LLMs.
 
 ## Deployment
 
-The application is designed to work in Hugging Face Spaces environment. For local LLM inference, it connects to a remote Ollama instance via ngrok tunnel at `https://a877ef1aa487.ngrok-free.app`. This allows the application to access powerful local models without requiring them to be installed directly in the Space.
+The application is designed to work in the Hugging Face Spaces environment. For local LLM inference, it connects to a remote Ollama instance via an ngrok tunnel at `https://ace32bd59aef.ngrok-free.app`. This allows the application to access powerful local models without requiring them to be installed directly in the Space.
 
 In case the remote Ollama instance is unavailable, the system gracefully falls back to checking a local instance, and handles unavailability by showing appropriate status messages in the UI.
+
+## Troubleshooting
+
+If you're experiencing connection issues with Ollama, you can run the diagnostic script:
+
+```
+python diagnose_ollama.py
+```
+
+This will test connectivity to your configured Ollama host and provide detailed information about any connection problems.
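
The remote-then-local fallback the README describes can be pictured with a minimal sketch. This is an illustration, not the application's actual code: it assumes the same `config.ollama_host` value the diagnostic script reads, and the helper name `pick_ollama_host` is hypothetical.

```python
# Minimal sketch of the remote-then-local fallback described in the README.
# Hypothetical helper; the app's real implementation may differ.
import requests

NGROK_HEADERS = {
    "ngrok-skip-browser-warning": "true",
    "User-Agent": "AI-Life-Coach-Diagnostic",
}

def pick_ollama_host(configured_host: str) -> str | None:
    """Return the first reachable Ollama host, or None if all are down."""
    for host in (configured_host, "http://localhost:11434"):
        try:
            resp = requests.get(f"{host}/api/tags", headers=NGROK_HEADERS, timeout=5)
            if resp.status_code == 200:
                return host
        except requests.RequestException:
            continue  # host unreachable; try the next candidate
    return None  # caller can surface an "Ollama unavailable" status in the UI
```
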
diagnose_ollama.py ADDED
@@ -0,0 +1,79 @@
+#!/usr/bin/env python3
+"""
+Diagnostic script to test Ollama connectivity
+"""
+
+import requests
+from utils.config import config
+
+def test_ollama_connectivity():
+    """Test if Ollama is reachable from the current environment"""
+
+    print("=== Ollama Connectivity Diagnostic ===")
+    print(f"Configured Ollama Host: {config.ollama_host}")
+    print()
+
+    # Headers to skip the ngrok browser warning
+    headers = {
+        "ngrok-skip-browser-warning": "true",
+        "User-Agent": "AI-Life-Coach-Diagnostic"
+    }
+
+    # Test 1: Check if we can reach the configured Ollama host
+    print("Test 1: Checking Ollama host connectivity...")
+    try:
+        response = requests.get(
+            f"{config.ollama_host}/api/tags",
+            headers=headers,
+            timeout=10
+        )
+        print(f"  Status Code: {response.status_code}")
+
+        if response.status_code == 200:
+            print("  ✓ Successfully connected to Ollama host")
+            try:
+                data = response.json()
+                models = data.get("models", [])
+                print(f"  Available Models: {len(models)}")
+                for model in models:
+                    print(f"    - {model.get('name', 'Unknown model')}")
+            except Exception as e:
+                print(f"  Error parsing response: {e}")
+                print(f"  Response text: {response.text[:200]}...")
+        else:
+            print(f"  ✗ Unexpected status code: {response.status_code}")
+            print(f"  Response: {response.text[:200]}...")
+
+    except requests.exceptions.Timeout:
+        print("  ✗ Request timed out (took more than 10 seconds)")
+    except requests.exceptions.ConnectionError as e:
+        print(f"  ✗ Connection error: {e}")
+    except Exception as e:
+        print(f"  ✗ Unexpected error: {e}")
+
+    print()
+
+    # Test 2: Check localhost as a fallback
+    print("Test 2: Checking localhost fallback (if different from configured host)...")
+    if config.ollama_host != "http://localhost:11434":
+        try:
+            local_response = requests.get(
+                "http://localhost:11434/api/tags",
+                headers=headers,
+                timeout=5
+            )
+            print(f"  Local Status Code: {local_response.status_code}")
+            if local_response.status_code == 200:
+                print("  ✓ Successfully connected to localhost Ollama")
+            else:
+                print(f"  ✗ Local connection failed with status {local_response.status_code}")
+        except Exception as e:
+            print(f"  ✗ Local connection error: {e}")
+    else:
+        print("  Skipping (configured host is already localhost)")
+
+    print()
+    print("=== Diagnostic Complete ===")
+
+if __name__ == "__main__":
+    test_ollama_connectivity()
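
The script only exercises Ollama's `/api/tags` (model listing) endpoint. If that check passes but the app still misbehaves, a one-off generation request through the same tunnel is a natural next step. A hedged sketch: it reuses the same headers and config object as diagnose_ollama.py, and `llama3` is only a placeholder model name; substitute one actually listed by `/api/tags`.

```python
# Follow-up check: request a single completion through the same ngrok tunnel.
# Assumes the same config object and headers as diagnose_ollama.py;
# "llama3" is a placeholder model name.
import requests
from utils.config import config

headers = {
    "ngrok-skip-browser-warning": "true",
    "User-Agent": "AI-Life-Coach-Diagnostic",
}

resp = requests.post(
    f"{config.ollama_host}/api/generate",
    json={"model": "llama3", "prompt": "Say hello.", "stream": False},
    headers=headers,
    timeout=60,
)
resp.raise_for_status()
print(resp.json().get("response", ""))
```
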