Update README.md
README.md (changed):

```diff
@@ -56,6 +56,7 @@ def llm_router(user_prompt, tokens_so_far = 0):
 
 def chat(user_prompt, model_store_entry = None, curr_ctx = [], system_prompt = ' ', verbose=False):
     if model_store_entry == None and curr_ctx == []:
+        # initial model selection
         model_store_entry = llm_router(user_prompt)
         if verbose:
             print(f'Classify prompt - selected model: {model_store_entry["model_id"]}')
@@ -69,12 +70,9 @@ def chat(user_prompt, model_store_entry = None, curr_ctx = [], system_prompt = '
     url = model_store_entry['url']
     api_key = model_store_entry['api_key']
     model_id = model_store_entry['model_id']
-    # max_ctx = model_store_entry['max_ctx']
 
     client = OpenAI(base_url=url, api_key=api_key)
-    # print(curr_ctx)
     messages = curr_ctx
-    # print(messages)
     messages.append({"role": "user", "content": user_prompt})
 
     completion = client.chat.completions.create(
@@ -86,7 +84,6 @@ def chat(user_prompt, model_store_entry = None, curr_ctx = [], system_prompt = '
     if verbose:
         print(f'Used model: {model_id}')
         print(f'completion: {completion}')
-    # return completion.choices[0].message.content
     client.close()
     return completion.choices[0].message.content, messages, model_store_entry
 
```
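For context, a minimal multi-turn usage sketch of the updated `chat()` helper. It assumes `chat()` and `llm_router()` from this README are in scope and that the model store entries point at a reachable OpenAI-compatible endpoint with a valid `url`, `api_key`, and `model_id`; the prompts below are placeholders.

```python
# First turn: no model_store_entry and no context, so chat() calls
# llm_router() internally to pick a model for this prompt.
reply, ctx, entry = chat("Summarize the plot of Dune in two sentences.",
                         verbose=True)
print(reply)

# chat() only appends the user turn to the returned messages, so the caller
# adds the assistant reply before continuing the conversation.
ctx.append({"role": "assistant", "content": reply})

# Follow-up turn: pass back the returned context and model entry so the
# same model keeps the accumulated conversation history.
reply, ctx, entry = chat("Now compare it to Foundation.",
                         model_store_entry=entry,
                         curr_ctx=ctx)
print(reply)
```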