Update app.py
app.py CHANGED
@@ -109,7 +109,7 @@ def prompt_engineer(text, longtext, query):
         sllm = HuggingFaceHub(
             repo_id="facebook/bart-large-cnn", model_kwargs={"temperature": 0, "max_new_tokens": 256, "task":"text-generation"}
         )
-        st.write("Chat llm connection started..")
+        st.write("Summary Chat llm connection started..")
     except Exception as e:
         st.error(f"Error invoke: {e}")
 
@@ -159,9 +159,9 @@ def prompt_engineer(text, longtext, query):
 
     try:
         llm = HuggingFaceHub(
-            repo_id="meta-llama/Meta-Llama-3-8B-Instruct", model_kwargs={"temperature": 0, "
+            repo_id="meta-llama/Meta-Llama-3-8B-Instruct", model_kwargs={"temperature": 0, "task":"text-generation"}
         )
-        st.write("
+        st.write("GEN llm connection started..")
         response_text = llm.invoke(prompt)
         escaped_query = re.escape(query)
         result = re.split(f'Answer the question based on the above context: {escaped_query}\n',response_text)[-1]
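For reference, here is a minimal sketch of how the changed lines appear to fit together after this commit. Only the HuggingFaceHub(...) constructor arguments, the st.write(...) status messages, and the re.split(...) post-processing come from the diff; the import paths, the answer_query wrapper, and the except clause around the generation call are assumptions for illustration, not part of the commit.

# Sketch only: the wrapper function and imports are assumptions; the
# constructor arguments, status messages, and re.split() cleanup are
# taken from the diff above.
import re

import streamlit as st
from langchain_community.llms import HuggingFaceHub  # assumed import path

def answer_query(prompt: str, query: str) -> str:
    try:
        # Generation LLM from the second hunk (app.py lines 161-164).
        llm = HuggingFaceHub(
            repo_id="meta-llama/Meta-Llama-3-8B-Instruct",
            model_kwargs={"temperature": 0, "task": "text-generation"},
        )
        st.write("GEN llm connection started..")
        response_text = llm.invoke(prompt)
        # The raw response echoes the prompt, so keep only the text after
        # the "Answer the question based on the above context: <query>" line.
        escaped_query = re.escape(query)
        return re.split(
            f"Answer the question based on the above context: {escaped_query}\n",
            response_text,
        )[-1]
    except Exception as e:  # mirrors the error handling shown in the first hunk
        st.error(f"Error invoke: {e}")
        return ""

The first hunk sets up a separate summarization connection (facebook/bart-large-cnn, assigned to sllm) in the same try/except style, with its own "Summary Chat llm connection started.." status message.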