Update app.py
app.py CHANGED
@@ -117,7 +117,7 @@ def chat_inf_a(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
     if len(client_choice)>=hid_val:
         if system_prompt:
             system_prompt=f'{system_prompt}, '
-
+        client1=client_z[int(hid_val)-1]
         if not history:
             history = []
             hist_len=0
@@ -131,9 +131,9 @@ def chat_inf_a(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
         )
         #formatted_prompt=prompt
         formatted_prompt = format_prompt_choose(f"{system_prompt}{prompt}", history, client_choice[0])
-
+        stream1 = client1.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
         output = ""
-        for response in
+        for response in stream1:
             output += response.token.text
             yield [(prompt,output)]
         history.append((prompt,output))
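The pattern these chat_inf_a hunks introduce is the standard huggingface_hub streaming call: pick a client out of client_z by the 1-based hid_val, call InferenceClient.text_generation with stream=True and details=True, and accumulate response.token.text into the running output that is re-yielded as a Gradio chatbot tuple. A minimal runnable sketch of that loop, assuming client_z holds huggingface_hub.InferenceClient instances as the added calls indicate (the model name and the generate_kwargs are placeholders, not the Space's actual configuration):

```python
from huggingface_hub import InferenceClient

# Placeholder model; the Space builds its own client_z list elsewhere in app.py.
client_z = [InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")]

def stream_chat(formatted_prompt, hid_val=1, **generate_kwargs):
    client = client_z[int(hid_val) - 1]  # hid_val is 1-based, the list is 0-based
    stream = client.text_generation(
        formatted_prompt,
        **generate_kwargs,
        stream=True,             # yields one token at a time
        details=True,            # each item carries .token metadata
        return_full_text=False,  # don't prepend the prompt to the output
    )
    output = ""
    for response in stream:
        output += response.token.text
        yield [(formatted_prompt, output)]  # the (user, bot) tuple Gradio's Chatbot expects
```

Each chat_inf_* function in this diff is that loop with a hard-coded suffix (client1/stream1 through client4/stream4) and a different client_choice slot.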
@@ -146,7 +146,7 @@ def chat_inf_b(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
     if len(client_choice)>=hid_val:
         if system_prompt:
             system_prompt=f'{system_prompt}, '
-
+        client2=client_z[int(hid_val)-1]
         if not history:
             history = []
             hist_len=0
@@ -160,9 +160,9 @@ def chat_inf_b(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
         )
         #formatted_prompt=prompt
         formatted_prompt = format_prompt_choose(f"{system_prompt}{prompt}", history, client_choice[1])
-
+        stream2 = client2.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
         output = ""
-        for response in
+        for response in stream2:
             output += response.token.text
             yield [(prompt,output)]
         history.append((prompt,output))
@@ -174,7 +174,7 @@ def chat_inf_c(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
     if len(client_choice)>=hid_val:
         if system_prompt:
             system_prompt=f'{system_prompt}, '
-
+        client3=client_z[int(hid_val)-1]
         if not history:
             history = []
             hist_len=0
@@ -187,10 +187,10 @@ def chat_inf_c(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
             seed=seed,
         )
         #formatted_prompt=prompt
-        formatted_prompt = format_prompt_choose(f"{system_prompt}
-
+        formatted_prompt = format_prompt_choose(f"{system_prompt}{prompt}", history, client_choice[2])
+        stream3 = client3.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
         output = ""
-        for response in
+        for response in stream3:
             output += response.token.text
             yield [(prompt,output)]
         history.append((prompt,output))
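Unlike the first two functions, chat_inf_c and chat_inf_d also rewrite their formatted_prompt lines, so each function now formats against its own slot of client_choice (index 2 and 3 respectively). format_prompt_choose itself is not part of this diff; from its call sites it evidently folds the combined system prompt, user prompt, and history into the chat template of the selected model. A hypothetical sketch of such a helper (the [INST] template is the Mistral convention and purely an assumption about what the real function does):

```python
# Hypothetical stand-in for the Space's format_prompt_choose; the actual
# implementation in app.py may use different templates per model.
def format_prompt_choose(message, history, model_name):
    if "mistral" in model_name.lower() or "mixtral" in model_name.lower():
        prompt = "".join(f"[INST] {user} [/INST] {bot}</s> " for user, bot in history)
        return prompt + f"[INST] {message} [/INST]"
    # Fallback: a plain role-tagged transcript for models without a known template.
    turns = "".join(f"USER: {user}\nASSISTANT: {bot}\n" for user, bot in history)
    return f"{turns}USER: {message}\nASSISTANT:"
```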
@@ -202,7 +202,7 @@ def chat_inf_d(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
     if len(client_choice)>=hid_val:
         if system_prompt:
             system_prompt=f'{system_prompt}, '
-
+        client4=client_z[int(hid_val)-1]
         if not history:
             history = []
             hist_len=0
@@ -215,10 +215,10 @@ def chat_inf_d(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
             seed=seed,
         )
         #formatted_prompt=prompt
-        formatted_prompt = format_prompt_choose(f"{system_prompt}
-
+        formatted_prompt = format_prompt_choose(f"{system_prompt}{prompt}", history, client_choice[3])
+        stream4 = client4.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
         output = ""
-        for response in
+        for response in stream4:
             output += response.token.text
             yield [(prompt,output)]
         history.append((prompt,output))
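Because chat_inf_a through chat_inf_d differ only in the numeric suffix and the hard-coded client_choice index, the whole change can be expressed once. A sketch of that consolidation, under the same assumptions about client_z and format_prompt_choose as above (an editor's suggestion, not part of this commit):

```python
# One parameterized generator covering chat_inf_a..d; slot is 0..3,
# matching the four per-model variants in the diff above.
def chat_inf(system_prompt, prompt, history, client_choice, generate_kwargs, hid_val, slot):
    if len(client_choice) >= hid_val:
        if system_prompt:
            system_prompt = f"{system_prompt}, "
        client = client_z[int(hid_val) - 1]
        history = history or []
        formatted_prompt = format_prompt_choose(f"{system_prompt}{prompt}", history, client_choice[slot])
        stream = client.text_generation(
            formatted_prompt, **generate_kwargs,
            stream=True, details=True, return_full_text=False,
        )
        output = ""
        for response in stream:
            output += response.token.text
            yield [(prompt, output)]
        history.append((prompt, output))
```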