Update helper_functions_api.py
helper_functions_api.py  CHANGED  (+10 -3)
@@ -190,13 +190,20 @@ def together_response(message, model = llm_default_small, SysPrompt = SysPromptD
     response = together_client.chat.completions.create(**params)
     return response.choices[0].message.content
 
-def openrouter_response(messages,model="meta-llama/llama-3-70b-instruct:nitro"):
-    response = or_client.chat.completions.create(
+def openrouter_response(messages, model="meta-llama/llama-3-70b-instruct:nitro"):
+    try:
+        response = or_client.chat.completions.create(
             model=model,
             messages=messages,
             max_tokens=4096,
         )
-    return response.choices[0].message.content
+
+        response_message = response.choices[0].message.content
+        return response_message
+    except Exception as e:
+        print(f"An error occurred: {str(e)}")
+        return None
+
 
 def json_from_text(text):
     """
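Usage note: the patch wraps the OpenRouter call in try/except, so callers now get None on failure instead of an exception. The `or_client` object is not defined in this hunk; the sketch below assumes it is an OpenAI-compatible client pointed at OpenRouter's API, with the base URL and the `OPENROUTER_API_KEY` environment variable name being assumptions rather than something this diff confirms.

# Minimal sketch, assuming or_client is an OpenAI-compatible client for OpenRouter.
import os
from openai import OpenAI

or_client = OpenAI(
    base_url="https://openrouter.ai/api/v1",   # assumed OpenRouter endpoint
    api_key=os.environ["OPENROUTER_API_KEY"],  # hypothetical env var name
)

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Summarize this repository in one sentence."},
]

# With the patched function, errors are printed and None is returned,
# so callers should check for None before using the reply.
reply = openrouter_response(messages)
if reply is None:
    print("OpenRouter request failed; see the logged error above.")
else:
    print(reply)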