Update app.py
app.py CHANGED
@@ -4,6 +4,13 @@ import gradio as gr
 from openai import OpenAI
 
 from optillm.cot_reflection import cot_reflection
+from optillm.rto import round_trip_optimization
+from optillm.z3_solver import Z3SolverSystem
+from optillm.self_consistency import advanced_self_consistency_approach
+from optillm.rstar import RStar
+from optillm.plansearch import plansearch
+from optillm.leap import leap
+
 
 API_KEY = os.environ.get("OPENROUTER_API_KEY")
 
@@ -29,9 +36,23 @@ def respond(
 
     messages.append({"role": "user", "content": message})
 
-
-
-
+    if approach == 'rto':
+        final_response = round_trip_optimization(system_prompt, initial_query, client, model)
+    elif approach == 'z3':
+        z3_solver = Z3SolverSystem(system_prompt, client, model)
+        final_response = z3_solver.process_query(initial_query)
+    elif approach == "self_consistency":
+        final_response = advanced_self_consistency_approach(system_prompt, initial_query, client, model)
+    elif approach == "rstar":
+        rstar = RStar(system_prompt, client, model)
+        final_response = rstar.solve(initial_query)
+    elif approach == "cot_reflection":
+        final_response = cot_reflection(system_prompt, initial_query, client, model)
+    elif approach == 'plansearch':
+        final_response = plansearch(system_prompt, initial_query, client, model)
+    elif approach == 'leap':
+        final_response = leap(system_prompt, initial_query, client, model)
+
     return final_response
 
     # for message in client.chat_completion(
@@ -58,7 +79,7 @@ demo = gr.ChatInterface(
             value="nousresearch/hermes-3-llama-3.1-405b:free", label="Model", info="Choose the base model"
         ),
         gr.Dropdown(
-            ["leap", "plansearch", "rstar", "cot_reflection"], value="cot_reflection", label="Approach", info="Choose the approach"
+            ["leap", "plansearch", "rstar", "cot_reflection", "rto", "self_consistency", "z3"], value="cot_reflection", label="Approach", info="Choose the approach"
         ),
         gr.Textbox(value="", label="System message"),
         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
@@ -73,6 +94,5 @@ demo = gr.ChatInterface(
     ],
 )
 
-
 if __name__ == "__main__":
     demo.launch()
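
For context, a minimal self-contained sketch of how these controls reach the chat function: gr.ChatInterface forwards each additional_inputs component's value as an extra positional argument after (message, history), which is how the new "Approach" dropdown value arrives in respond(). The body below is only a placeholder echo, not the app's actual dispatch into the optillm approaches shown in the diff.

import gradio as gr

# Sketch only: each additional_inputs value is passed to the function
# after (message, history), in the same order as the list below.
def respond(message, history, model, approach, system_message, max_tokens):
    # Placeholder body; the real app dispatches to an optillm approach here.
    return f"[{approach} via {model}] {message}"

demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Dropdown(
            ["nousresearch/hermes-3-llama-3.1-405b:free"],
            value="nousresearch/hermes-3-llama-3.1-405b:free", label="Model", info="Choose the base model"
        ),
        gr.Dropdown(
            ["leap", "plansearch", "rstar", "cot_reflection", "rto", "self_consistency", "z3"],
            value="cot_reflection", label="Approach", info="Choose the approach"
        ),
        gr.Textbox(value="", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
    ],
)

if __name__ == "__main__":
    demo.launch()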