Spaces:
Sleeping
Sleeping
Upload model_manage.py (#3)
Browse files- Upload model_manage.py (3634f37f18c771844860fd89535eb6544fef8db7)
Co-authored-by: Nguyen Nguyen Anh <AnhLedger@users.noreply.huggingface.co>
- chat/model_manage.py +32 -0
chat/model_manage.py
CHANGED
|
@@ -167,3 +167,35 @@ def full_chain_single_question(input_prompt, db_instance):
|
|
| 167 |
except Exception as e:
|
| 168 |
# print(e)
|
| 169 |
return temp_answer, "Error occured: " + str(e)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 167 |
except Exception as e:
|
| 168 |
# print(e)
|
| 169 |
return temp_answer, "Error occured: " + str(e)
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
def format_chat_history_from_web(chat_history: list):
    """Convert web-style chat messages into model-ready content dicts.

    Each incoming message is expected to carry "role" and "content" keys;
    the result keeps the role and wraps the content in a one-element
    "parts" list, as the generative model API expects.

    Args:
        chat_history: list of {"role": ..., "content": ...} dicts.

    Returns:
        list of {"role": ..., "parts": [content]} dicts, same order.
    """
    return [
        {"role": msg["role"], "parts": [msg["content"]]}
        for msg in chat_history
    ]
|
| 182 |
+
|
| 183 |
+
def full_chain_history_question(chat_history: list, db_instance):
    """Answer the newest message of a web chat history with retrieval support.

    Pipeline: normalize the history, ask the model to extract search
    keywords from the latest user message, retrieve contexts from
    db_instance, then either return the direct answer (no retrieval hits)
    or re-ask the model with a context-augmented prompt.

    Args:
        chat_history: list of {"role", "content"} dicts from the web client.
        db_instance: retrieval backend passed through to response().

    Returns:
        (keyword-extraction text, final answer) on success;
        ("Random question, direct return", contexts) when retrieval finds
        nothing; (last model text, error message) on any exception.
    """
    # Pre-bind so the except handler can always reference it: without this,
    # a failure before the first generate_content call (e.g. a KeyError in
    # format_chat_history_from_web) raised NameError instead of returning
    # the intended error tuple.
    temp_answer = ""
    try:
        temp_chat = format_chat_history_from_web(chat_history)
        # Keyword extraction works on the latest message's text only.
        first_prompt = extract_keyword_prompt(temp_chat[-1]["parts"][0])
        temp_answer = model.generate_content(first_prompt).text

        args = json.loads(utils.trimming(temp_answer))
        contexts, results = response(args, db_instance)
        if not results:
            # No retrieval hits: contexts already holds the direct reply.
            return "Random question, direct return", contexts
        else:
            QA_Prompt = make_answer_prompt(temp_chat[-1]["parts"][0], contexts)
            # NOTE(review): "parts" is a list everywhere else; assigning the
            # bare prompt string here preserves the original behavior —
            # confirm the model API accepts a plain string for "parts".
            temp_chat[-1]["parts"] = QA_Prompt
            answer = model.generate_content(temp_chat).text
            return temp_answer, answer
    except Exception as e:
        return temp_answer, "Error occured: " + str(e)
|