Update app.py
app.py CHANGED
@@ -31,9 +31,9 @@ def chat_with_model(user_input, system_prompt, selected_model):
     result = client.chat.completions.create(
         model=selected_model,
         messages=messages,
-        temperature=0.
-        max_tokens=
-        top_p=0.
+        temperature=0.9,
+        max_tokens=512,
+        top_p=0.97,
         stream=False  # Stream disabled for simplicity
     )
     return result["choices"][0]["message"]["content"]
@@ -52,8 +52,8 @@ def get_prompt(name):
 
 # List of available models
 available_models = [
-    "
-    "
+    "Qwen/Qwen2.5-Coder-0.5B-Instruct",
+    "Qwen/Qwen2.5-Coder-1.5B-Instruct",
     "HuggingFaceH4/zephyr-7b-beta",
     "HuggingFaceH4/zephyr-7b-alpha"
 ]
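For reference, a minimal sketch of how the updated call could sit in context. Only the create() arguments and the return line come from this diff; the client setup and message construction are assumptions, written here against an OpenAI-compatible chat-completions client such as huggingface_hub.InferenceClient.

from huggingface_hub import InferenceClient

# Assumed setup: the real app.py may pass an explicit token, model, or base URL.
client = InferenceClient()

def chat_with_model(user_input, system_prompt, selected_model):
    # Assumed message construction; only the arguments below reflect the commit.
    messages = [
        {"role": "system", "content": system_prompt},
        {"role": "user", "content": user_input},
    ]
    result = client.chat.completions.create(
        model=selected_model,
        messages=messages,
        temperature=0.9,   # values introduced by this commit
        max_tokens=512,
        top_p=0.97,
        stream=False,      # Stream disabled for simplicity
    )
    return result["choices"][0]["message"]["content"]

Under those assumptions, a call like chat_with_model("Hello", "You are a helpful assistant.", available_models[0]) would send the prompt to the newly added "Qwen/Qwen2.5-Coder-0.5B-Instruct" entry and return the reply as a plain string.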