Spaces:
Sleeping
Sleeping
Update local changes
Browse files — seminar_edition_ai.py (+27 −3)
seminar_edition_ai.py
CHANGED
|
@@ -113,7 +113,7 @@ def predictProclamando(queryKey):
|
|
| 113 |
####
|
| 114 |
#
|
| 115 |
####
|
| 116 |
-def predictFromInit( sermonTopic, llmModelList):
|
| 117 |
global HISTORY_ANSWER
|
| 118 |
keyStr = 'SERMON_TOPIC'
|
| 119 |
templates = SermonGeminiPromptTemplate()
|
|
@@ -178,9 +178,11 @@ def predictFromInit( sermonTopic, llmModelList):
|
|
| 178 |
####
|
| 179 |
#
|
| 180 |
####
|
| 181 |
-def predictQuestionBuild(sermonTopic):
|
|
|
|
| 182 |
templates = SermonGeminiPromptTemplate()
|
| 183 |
chain = updatePromptTemplate(
|
|
|
|
| 184 |
templates.getSermonPromptTemplates()['BUILD_QUESTION'],
|
| 185 |
['SERMON_IDEA', 'context']
|
| 186 |
)
|
|
@@ -217,15 +219,37 @@ def predictQuestionBuild(sermonTopic):
|
|
| 217 |
####
|
| 218 |
#
|
| 219 |
####
|
| 220 |
-def predictDevotionBuild(sermonTopic):
|
| 221 |
templates = SermonGeminiPromptTemplate()
|
|
|
|
| 222 |
|
| 223 |
chain = updatePromptTemplate(
|
|
|
|
| 224 |
templates.getSermonPromptTemplate()['BUILD_REFLECTIONS'],
|
| 225 |
['SERMON_IDEA', 'context']
|
| 226 |
)
|
|
|
|
| 227 |
global retriever
|
| 228 |
global HISTORY_ANSWER
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 229 |
answer = askQuestionEx(
|
| 230 |
HISTORY_ANSWER,
|
| 231 |
chain,
|
|
|
|
| 113 |
####
|
| 114 |
#
|
| 115 |
####
|
| 116 |
+def predictFromInit( sermonTopic, llmModelList = []):
|
| 117 |
global HISTORY_ANSWER
|
| 118 |
keyStr = 'SERMON_TOPIC'
|
| 119 |
templates = SermonGeminiPromptTemplate()
|
|
|
|
| 178 |
####
|
| 179 |
#
|
| 180 |
####
|
| 181 |
+def predictQuestionBuild(sermonTopic, llmModelList = []):
|
| 182 |
+llm = llmModelList[0] if len(llmModelList) > 0 else None
|
| 183 |
templates = SermonGeminiPromptTemplate()
|
| 184 |
chain = updatePromptTemplate(
|
| 185 |
+llm,
|
| 186 |
templates.getSermonPromptTemplates()['BUILD_QUESTION'],
|
| 187 |
['SERMON_IDEA', 'context']
|
| 188 |
)
|
|
|
|
| 219 |
####
|
| 220 |
#
|
| 221 |
####
|
| 222 |
+def predictDevotionBuild(sermonTopic, llmModelList = []):
|
| 223 |
templates = SermonGeminiPromptTemplate()
|
| 224 |
+llm = llmModelList[0] if len(llmModelList) > 0 else None
|
| 225 |
|
| 226 |
chain = updatePromptTemplate(
|
| 227 |
+llm,
|
| 228 |
templates.getSermonPromptTemplate()['BUILD_REFLECTIONS'],
|
| 229 |
['SERMON_IDEA', 'context']
|
| 230 |
)
|
| 231 |
+
|
| 232 |
global retriever
|
| 233 |
global HISTORY_ANSWER
|
| 234 |
+global embed_model
|
| 235 |
+
|
| 236 |
+if embed_model == None:
|
| 237 |
+llmBuilder = GeminiLLM()
|
| 238 |
+embed_model = llmBuilder.getEmbeddingsModel()
|
| 239 |
+
|
| 240 |
+if retriever == None:
|
| 241 |
+doc = Document(page_content="text", metadata={"source": "local"})
|
| 242 |
+
|
| 243 |
+vectorstore = Chroma.from_documents(
|
| 244 |
+documents=[doc],
|
| 245 |
+embedding=embed_model,
|
| 246 |
+persist_directory="chroma_db_dir_sermon", # Local mode with in-memory storage only
|
| 247 |
+collection_name="sermon_lab_ai"
|
| 248 |
+)
|
| 249 |
+retriever = vectorstore.as_retriever(
|
| 250 |
+search_kwargs={"k": 3}
|
| 251 |
+)
|
| 252 |
+
|
| 253 |
answer = askQuestionEx(
|
| 254 |
HISTORY_ANSWER,
|
| 255 |
chain,
|