Update app.py
app.py CHANGED
@@ -47,6 +47,11 @@ def chat(history, system_message, max_tokens, temperature, top_p, top_k, repetit
     # remove last space from assistant, some models output a ZWSP if you leave a space
     messages = messages.rstrip()
 
+    # If temperature is set to 0, force Top P to 1 and Top K to -1
+    if temperature == 0:
+        top_p = 1
+        top_k = -1
+
     prediction = make_prediction(
         messages,
         max_tokens=max_tokens,
@@ -103,7 +108,7 @@ with gr.Blocks(css=CSS) as demo:
             max_tokens = gr.Slider(20, 2500, label="Max Tokens", step=20, value=500)
             temperature = gr.Slider(0.0, 2.0, label="Temperature", step=0.1, value=0.4)
             top_p = gr.Slider(0.0, 1.0, label="Top P", step=0.05, value=0.95)
-            top_k = gr.Slider(
+            top_k = gr.Slider(1, 100, label="Top K", step=1, value=40)
             repetition_penalty = gr.Slider(1.0, 2.0, label="Repetition Penalty", step=0.1, value=1.1)
 
             system_msg = gr.Textbox(
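Note on the new temperature guard, as a minimal sketch of assumed behavior rather than code from this Space: with temperature at 0 most backends decode greedily, so nucleus and top-k filtering contribute nothing and some servers reject the mixed settings; setting top_p to 1 and top_k to -1 is a common convention for disabling both filters before the values are forwarded (here via make_prediction). The helper name below is hypothetical.

# Sketch (assumed convention, not the Space's actual make_prediction):
# neutralize the sampling filters when temperature == 0 so the request
# reflects plain greedy decoding.
def build_sampling_params(temperature: float, top_p: float, top_k: int) -> dict:
    if temperature == 0:
        top_p = 1   # 1.0 keeps the full token distribution (nucleus filter off)
        top_k = -1  # -1 conventionally disables top-k filtering
    return {"temperature": temperature, "top_p": top_p, "top_k": top_k}

# Example: greedy request regardless of the slider values
print(build_sampling_params(0, 0.95, 40))
# {'temperature': 0, 'top_p': 1, 'top_k': -1}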