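"""Chatbot backend for FAQ answering and course recommendations.

Loads question/answer pairs from a remote FAQ database, matches user input to
the closest stored question using sentence-transformer embeddings (with a
keyword-overlap fallback when no model is available), and delegates course
recommendations to CourseRecommender.
"""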
import requests
import json
from sentence_transformers import SentenceTransformer
import numpy as np
from sklearn.metrics.pairwise import cosine_similarity
from recommender import CourseRecommender

class Chatbot:
    def __init__(self):
        self.qa_pairs = []
        self.question_embeddings = []
        self.model = None
        self.database_url = "https://database-46m3.onrender.com"
        self.recommender = None
        self.load_model()
        self.load_recommender()
        self.load_qa_data()
    
    def load_model(self):
        """Load the sentence transformer model with error handling"""
        import time
        import os
        
        # List of models to try in order of preference
        models_to_try = [
            'all-MiniLM-L6-v2',
            'paraphrase-MiniLM-L6-v2',
            'all-MiniLM-L12-v2'
        ]
        
        for model_name in models_to_try:
            try:
                print(f"Loading sentence transformer model: {model_name}...")
                
                # Try with cache directory first
                cache_dir = os.path.join(os.getcwd(), 'model_cache')
                os.makedirs(cache_dir, exist_ok=True)
                
                self.model = SentenceTransformer(model_name, cache_folder=cache_dir)
                print(f"βœ… Model {model_name} loaded successfully")
                return
                
            except Exception as e:
                print(f"❌ Error loading {model_name}: {str(e)}")
                continue
        
        # If all models fail, try without cache
        try:
            print("Trying without cache directory...")
            self.model = SentenceTransformer('all-MiniLM-L6-v2')
            print("βœ… Model loaded successfully without cache")
        except Exception as e:
            print(f"❌ Final attempt failed: {str(e)}")
            raise Exception("Could not load any sentence transformer model")
    
    def load_recommender(self):
        """Load the course recommender with error handling"""
        try:
            print("Loading course recommender...")
            self.recommender = CourseRecommender()
            print("βœ… Recommender loaded successfully")
        except Exception as e:
            print(f"❌ Error loading recommender: {str(e)}")
            self.recommender = None
    
    def load_qa_data(self):
        """Load Q&A pairs from the faqs table in the database"""
        try:
            # Connect to the faqs table endpoint
            faqs_url = f"{self.database_url}/faqs"
            # Bounded wait so a slow or unreachable database falls through to the fallback data
            response = requests.get(faqs_url, timeout=10)
            if response.status_code == 200:
                data = response.json()
                # Assuming the database returns a list of FAQ objects
                if isinstance(data, list):
                    self.qa_pairs = data
                else:
                    # If it's a single object, wrap it in a list
                    self.qa_pairs = [data]
                
                # Generate embeddings for all questions if model is available
                questions = [item.get('question', '') for item in self.qa_pairs]
                if self.model is not None:
                    self.question_embeddings = self.model.encode(questions)
                    print(f"Loaded {len(self.qa_pairs)} FAQ pairs with embeddings from database")
                else:
                    print(f"Loaded {len(self.qa_pairs)} FAQ pairs from database (using fallback matching)")
            else:
                print(f"Failed to load data from faqs table. Status code: {response.status_code}")
                self._load_fallback_data()
        except Exception as e:
            print(f"Error loading FAQ data: {str(e)}")
            self._load_fallback_data()
    
    def _load_fallback_data(self):
        """Load fallback data if database is unavailable"""
        self.qa_pairs = [
            {"question": "What is artificial intelligence?", "answer": "Artificial Intelligence (AI) is a branch of computer science that aims to create machines capable of intelligent behavior."},
            {"question": "How does machine learning work?", "answer": "Machine learning is a subset of AI that enables computers to learn and improve from experience without being explicitly programmed."},
            {"question": "What is deep learning?", "answer": "Deep learning is a subset of machine learning that uses neural networks with multiple layers to model and understand complex patterns in data."},
            {"question": "What is natural language processing?", "answer": "Natural Language Processing (NLP) is a field of AI that focuses on the interaction between computers and humans through natural language."},
            {"question": "What is a neural network?", "answer": "A neural network is a computing system inspired by biological neural networks that constitute animal brains. It consists of interconnected nodes (neurons) that process information."}
        ]
        questions = [item['question'] for item in self.qa_pairs]
        if self.model is not None:
            self.question_embeddings = self.model.encode(questions)
            print("Loaded fallback Q&A data with embeddings")
        else:
            print("Loaded fallback Q&A data (using fallback matching)")
    
    def find_best_match(self, user_input, threshold=0.7):
        """Find the best matching question using semantic similarity or fallback text matching"""
        if not self.qa_pairs:
            return None, 0
        
        if self.model is not None and len(self.question_embeddings) > 0:
            # Use AI model for semantic matching
            user_embedding = self.model.encode([user_input])
            similarities = cosine_similarity(user_embedding, self.question_embeddings)[0]
            best_match_idx = np.argmax(similarities)
            best_similarity = similarities[best_match_idx]
            
            if best_similarity >= threshold:
                return self.qa_pairs[best_match_idx], best_similarity
            else:
                return None, best_similarity
        else:
            # Fallback to simple text matching
            user_input_lower = user_input.lower()
            best_match = None
            best_score = 0
            
            for qa_pair in self.qa_pairs:
                question = qa_pair.get('question', '').lower()
                # Simple keyword matching
                common_words = set(user_input_lower.split()) & set(question.split())
                if common_words:
                    score = len(common_words) / max(len(user_input_lower.split()), len(question.split()))
                    if score > best_score and score >= 0.3:  # Lower threshold for fallback
                        best_score = score
                        best_match = qa_pair
            
            if best_match:
                return best_match, best_score
            else:
                return None, 0
    
    def get_response(self, user_input):
        """Get response for user input"""
        if not user_input.strip():
            return "Please enter a message."
        
        best_match, similarity = self.find_best_match(user_input)
        
        if best_match:
            return {
                'answer': best_match.get('answer', 'No answer found'),
                'confidence': float(similarity),
                'matched_question': best_match.get('question', ''),
                'status': 'success'
            }
        else:
            return {
                'answer': "I'm sorry, I couldn't find a relevant answer to your question. Could you please rephrase it or ask something else?",
                'confidence': float(similarity),
                'matched_question': '',
                'status': 'no_match'
            }
    
    def get_qa_count(self):
        """Get the number of loaded Q&A pairs"""
        return len(self.qa_pairs)
    
    def get_course_recommendations(self, stanine, gwa, strand, hobbies):
        """Get course recommendations using the recommender system"""
        try:
            # Validate inputs
            stanine = int(stanine) if isinstance(stanine, str) else stanine
            gwa = float(gwa) if isinstance(gwa, str) else gwa
            
            if not (1 <= stanine <= 9):
                return "❌ Stanine score must be between 1 and 9"
            if not (75 <= gwa <= 100):
                return "❌ GWA must be between 75 and 100"
            if not strand:
                return "❌ Please select a strand"
            if not hobbies or not str(hobbies).strip():
                return "❌ Please enter your hobbies/interests"
            
            if self.recommender is None:
                return "❌ Course recommendation system is not available at the moment. Please try again later."
            
            # Get recommendations
            recommendations = self.recommender.recommend_courses(
                stanine=stanine,
                gwa=gwa,
                strand=strand,
                hobbies=str(hobbies)
            )
            
            if not recommendations:
                return "No recommendations available at the moment."
            
            # Format response (without confidence scores)
            response = f"## 🎯 Course Recommendations for You\n\n"
            response += f"**Profile:** Stanine {stanine}, GWA {gwa}, {strand} Strand\n"
            response += f"**Interests:** {hobbies}\n\n"
            
            for i, rec in enumerate(recommendations, 1):
                response += f"### {i}. {rec['code']} - {rec['name']}\n\n"
            
            return response
            
        except Exception as e:
            return f"❌ Error getting recommendations: {str(e)}"