Spaces:
cleanup instruct app. add queue so UI notifies users
Files changed:
- chat.py (+3 -5)
- app.py → instruct.py (renamed, +7 -8)
chat.py
CHANGED
@@ -65,9 +65,7 @@ start_message = """
 """
 
 
-
-
-with blocks:
+with gr.Blocks() as blocks:
     gr.Markdown("# GGML Spaces Demo")
 
     chatbot = gr.Chatbot()
@@ -107,12 +105,12 @@ with blocks:
     clear.click(lambda: None, None, chatbot, queue=False)
 
     submit_click_event = submit.click(
-        fn=user, inputs=[message, chat_history_state], outputs=[message, chat_history_state], queue=False
+        fn=user, inputs=[message, chat_history_state], outputs=[message, chat_history_state], queue=True
     ).then(
         fn=chat, inputs=[chat_history_state, system_msg, max_tokens, temperature, top_p, top_k, repeat_penalty], outputs=[chatbot, chat_history_state], queue=True
     )
     message_submit_event = message.submit(
-        fn=user, inputs=[message, chat_history_state], outputs=[message, chat_history_state], queue=False
+        fn=user, inputs=[message, chat_history_state], outputs=[message, chat_history_state], queue=True
     ).then(
         fn=chat, inputs=[chat_history_state, system_msg, max_tokens, temperature, top_p, top_k, repeat_penalty], outputs=[chatbot, chat_history_state], queue=True
     )
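Why this addresses the notification problem: with queue=True on each step of the chain, Gradio routes the handlers through its request queue, so waiting users see a queue position instead of a silently hanging request. Below is a minimal, runnable sketch of the same chained-submit pattern, assuming the Gradio 3.x API of this era; the user and chat bodies are illustrative stand-ins for the app's real llama.cpp handlers, and the chatbot component doubles as history state in place of chat_history_state:

import gradio as gr

def user(message, history):
    # Fast step: append the user's turn and clear the textbox immediately.
    history = history or []
    return "", history + [[message, None]]

def chat(history):
    # Slow step: fill in the bot reply (the real app calls llama.cpp here).
    history[-1][1] = f"(placeholder reply to: {history[-1][0]})"
    return history

with gr.Blocks() as blocks:
    gr.Markdown("# GGML Spaces Demo")
    chatbot = gr.Chatbot()
    message = gr.Textbox()
    message.submit(
        fn=user, inputs=[message, chatbot], outputs=[message, chatbot], queue=True
    ).then(
        fn=chat, inputs=[chatbot], outputs=[chatbot], queue=True
    )

blocks.queue().launch()

Note the blocks.queue() call before launch(): per-event queue=True only takes effect once the queue is enabled on the Blocks itself.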
app.py → instruct.py
RENAMED
@@ -23,11 +23,10 @@ description = f"""llama.cpp implementation in python [https://github.com/abetlen
 This is the {config["repo"]}/{config["file"]} model.
 """
 
-
-
-
-
-
-
-
-
+gr.Interface(
+    fn=generate_text,
+    inputs=input_text,
+    outputs=output_text,
+    title="Llama Language Model",
+    description=description,
+).queue(max_size=16, concurrency_count=1).launch()
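A note on the two queue() arguments, as they behaved in the Gradio 3.x API used here: max_size=16 caps how many requests may wait in line (further requests are turned away until space frees up), and concurrency_count=1 runs a single worker, so generations execute strictly one at a time, a sensible setting for one in-process llama.cpp model. A self-contained sketch with a stand-in generator, since generate_text, input_text, and output_text are defined elsewhere in instruct.py:

import gradio as gr

def generate_text(prompt: str) -> str:
    # Placeholder for instruct.py's real llama.cpp completion call.
    return f"(placeholder completion for: {prompt})"

gr.Interface(
    fn=generate_text,
    inputs=gr.Textbox(label="Prompt"),       # stands in for the app's input_text
    outputs=gr.Textbox(label="Completion"),  # stands in for the app's output_text
    title="Llama Language Model",
).queue(
    max_size=16,          # at most 16 requests wait in line
    concurrency_count=1,  # one request is processed at a time
).launch()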