rdune71 committed on
Commit
11f438b
·
1 Parent(s): 87a7535

Implement Ollama status monitor

Browse files
Files changed (1) hide show
  1. services/ollama_monitor.py +41 -0
services/ollama_monitor.py CHANGED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import requests
2
+ from utils.config import config
3
+
4
def check_ollama_status(ngrok_url="https://a877ef1aa487.ngrok-free.app/", local_url=None):
    """
    Check whether Ollama is reachable and which model is loaded.

    Probes the local endpoint first, then the ngrok tunnel; the local
    endpoint's model wins when both respond.

    Args:
        ngrok_url: Public ngrok tunnel to the Ollama server. Defaults to the
            project's current tunnel; pass a different URL to override.
        local_url: Local Ollama base URL. Defaults to ``config.ollama_host``
            (loaded from .env) when not provided.

    Returns:
        dict: {
            "running": bool,            # True when either endpoint reported a model
            "model_loaded": str | None, # first model name, e.g. "mistral-7b"
            "ngrok_url": str,           # the ngrok URL that was probed
            "local_url": str,           # the local URL that was probed
        }
    """
    if local_url is None:
        local_url = config.ollama_host  # From .env

    def _get_model_from_url(base_url):
        """Return the first model name reported by /api/tags, or None on any failure."""
        try:
            # rstrip('/') avoids a double slash when base_url ends with "/"
            response = requests.get(f"{base_url.rstrip('/')}/api/tags", timeout=3)
            if response.status_code == 200:
                models = response.json().get("models", [])
                if models:
                    return models[0].get("name")
        except Exception:
            # Best-effort probe: connection errors, timeouts, and malformed
            # JSON all mean "no model visible at this endpoint".
            return None
        return None

    local_model = _get_model_from_url(local_url)
    remote_model = _get_model_from_url(ngrok_url)

    # Prefer the local instance; fall back to the remote tunnel.
    model_loaded = local_model or remote_model
    running = bool(model_loaded)

    return {
        "running": running,
        "model_loaded": model_loaded,
        "ngrok_url": ngrok_url,
        "local_url": local_url,
    }