Make thread safe
app.py CHANGED
@@ -1,11 +1,11 @@
 # Basic example for doing model-in-the-loop dynamic adversarial data collection
 # using Gradio Blocks.
-import concurrent.futures
 import json
 import os
 import threading
 import time
 import uuid
+from concurrent.futures import ThreadPoolExecutor
 from pathlib import Path
 from typing import List
 from urllib.parse import parse_qs
@@ -28,9 +28,9 @@ def generate_respone(chatbot: ConversationChain, input: str) -> str:
 def generate_responses(chatbots: List[ConversationChain], inputs: List[str]) -> List[str]:
     """Generates parallel responses for a list of `langchain` chatbots."""
     results = []
-
-
-
+    with ThreadPoolExecutor(max_workers=100) as executor:
+        for result in executor.map(generate_respone, chatbots, inputs):
+            results.append(result)
     return results
 
 
@@ -116,11 +116,9 @@ with demo:
 
     # Generate model prediction
    def _predict(txt, state):
-        # TODO: parallelize this!
         start = time.time()
         responses = generate_responses(chatbots, [txt] * len(chatbots))
-        print(f"Time taken (
-
+        print(f"Time taken to generate {len(chatbots)} responses : {time.time() - start:.2f} seconds")
 
         response2model_id = {}
         for chatbot, response in zip(chatbots, responses):