correcting reply index
- app.py +1 -1
- utils/haystack.py +3 -3

app.py
CHANGED

@@ -53,5 +53,5 @@ if st.session_state.get("api_key_configured"):

     if st.session_state.result:
         reply = st.session_state.result
-        st.write(reply[
+        st.write(reply[0])

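Why reply[0]: after this commit, query() in utils/haystack.py (second diff below) returns the generator's list of replies, so app.py writes the first generated string instead of the raw result. A dependency-free sketch of the shapes involved; the sample dict and text are illustrative, not output from this Space:

# Illustrative shape of what query() now returns: a Haystack 2.x generator
# component exposes its output as {"replies": [...]}, and query() pulls out
# replies['llm']['replies'].
pipeline_output = {"llm": {"replies": ["First generated answer ..."]}}  # sample values only

result = pipeline_output["llm"]["replies"]   # what query() returns on success

# app.py stores that list in st.session_state.result and shows the first
# reply; print() stands in for st.write() here.
reply = result
print(reply[0])   # -> "First generated answer ..."
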
utils/haystack.py
CHANGED

@@ -65,7 +65,6 @@ Articles:
     fetcher = PubMedFetcher()

     pipe = Pipeline()
-
     pipe.add_component("keyword_prompt_builder", keyword_prompt_builder)
     pipe.add_component("keyword_llm", keyword_llm)
     pipe.add_component("pubmed_fetcher", fetcher)

@@ -76,16 +75,17 @@ Articles:
     pipe.connect("keyword_llm.replies", "pubmed_fetcher.queries")

     pipe.connect("pubmed_fetcher.articles", "prompt_builder.articles")
-    pipe.connect("prompt_builder.prompt", "llm.prompt")
+    pipe.connect("prompt_builder.prompt", "llm.prompt")
     return pipe


 @st.cache_data(show_spinner=True)
 def query(query, _pipeline):
     try:
-
+        replies = _pipeline.run(data={"keyword_prompt_builder":{"question":query},
                                       "prompt_builder":{"question": query},
                                       "answer_llm":{"generation_kwargs": {"max_new_tokens": 500}}})
+        result = replies['llm']['replies']
     except Exception as e:
         result = ["Please make sure you are providing a correct, public Mastodon account"]
     return result
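
For completeness, a minimal sketch of the contract query() has after this change: it returns a list of strings on both the happy path and the error path, which is what lets app.py index reply[0] unconditionally. The run_pipeline callable below is a stand-in for _pipeline.run(data={...}); names and sample values are illustrative, not the Space's actual code.

# Sketch only: mirrors the try/except structure in the diff above with a
# stand-in for the Haystack pipeline, so it runs without any dependencies.
def query_sketch(question, run_pipeline):
    try:
        replies = run_pipeline(question)        # e.g. {"llm": {"replies": ["..."]}}
        result = replies["llm"]["replies"]      # list of generated strings
    except Exception:
        # Same list shape on failure (error text copied from the diff above)
        result = ["Please make sure you are providing a correct, public Mastodon account"]
    return result

# Both paths index cleanly with [0], as app.py now does:
print(query_sketch("q", lambda q: {"llm": {"replies": ["An answer."]}})[0])

def broken_pipeline(q):
    raise RuntimeError("simulated pipeline failure")

print(query_sketch("q", broken_pipeline)[0])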