Create dummy_code.txt
dummy_code.txt ADDED (+20 -0)
import json
from datetime import datetime
from pathlib import Path

from langchain_core.callbacks import BaseCallbackHandler
from langchain_core.outputs import LLMResult
from langchain_groq import ChatGroq


class LLMCallbackHandler(BaseCallbackHandler):
    """Logs every prompt and completion as one JSON line in a .jsonl file."""

    def __init__(self, log_path: Path):
        self.log_path = log_path

    def on_llm_start(self, serialized, prompts, **kwargs):
        # Append the outgoing prompt as a single JSON record.
        with self.log_path.open("a", encoding="utf-8") as file:
            file.write(json.dumps({"event": "llm_start", "text": prompts[0], "timestamp": datetime.now().isoformat()}) + "\n")

    def on_llm_end(self, response: LLMResult, **kwargs):
        # Log the last generation of the last prompt.
        generation = response.generations[-1][-1].message.content
        with self.log_path.open("a", encoding="utf-8") as file:
            file.write(json.dumps({"event": "llm_end", "text": generation, "timestamp": datetime.now().isoformat()}) + "\n")


# Initialize LLM
llm = ChatGroq(
    temperature=0,
    # The "groq/" prefix is the LiteLLM-style provider prefix (as used by CrewAI);
    # plain langchain_groq expects just "llama-3.3-70b-versatile".
    model_name="groq/llama-3.3-70b-versatile",
    max_tokens=500,
    callbacks=[LLMCallbackHandler(Path("prompts.jsonl"))],
)
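
For context, a minimal usage sketch (hypothetical, not part of dummy_code.txt): it assumes a valid GROQ_API_KEY is set in the environment, invokes the llm defined above once, and then reads back the events the handler appended. One call fires on_llm_start and on_llm_end, so prompts.jsonl gains two lines per invocation.

# --- usage sketch (assumes GROQ_API_KEY is set) ---
import json
from pathlib import Path

reply = llm.invoke("Say hello in one word.")
print(reply.content)

# Each call appends an llm_start and an llm_end event to prompts.jsonl.
for line in Path("prompts.jsonl").read_text(encoding="utf-8").splitlines():
    event = json.loads(line)
    print(event["event"], event["timestamp"])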