from huggingface_hub import InferenceClient
import gradio as gr
import json

# Client for the hosted Mistral instruct model on the Hugging Face Inference API.
client = InferenceClient(
    "mistralai/Mistral-7B-Instruct-v0.3"
)

# Path of the JSON file used as a simple prompt -> response cache.
DATABASE_PATH = "database.json"


def load_database():
    # Return the cached prompt/response pairs, or an empty dict if no cache exists yet.
    try:
        with open(DATABASE_PATH, "r") as file:
            return json.load(file)
    except FileNotFoundError:
        return {}


def save_database(database):
    # Persist the prompt/response cache to disk.
    with open(DATABASE_PATH, "w") as file:
        json.dump(database, file)

def format_prompt(message, history):
    # Build a Mistral-instruct style prompt from the chat history plus the new message.
    prompt = "<s>"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response}</s> "
    prompt += f"[INST] {message} [/INST]"
    return prompt
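
# For illustration only: with one prior exchange ("Hi" -> "Hello!") and a new message
# "Tell me a joke", format_prompt() above produces a string laid out like
#   <s>[INST] Hi [/INST] Hello!</s> [INST] Tell me a joke [/INST]
# (the example values are made up; the layout follows the template built above).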

def generate(
    prompt, history, temperature=0.9, max_new_tokens=2000, top_p=0.9, repetition_penalty=1.2,
):
    database = load_database()  # Load the prompt/response cache

    temperature = float(temperature)
    if temperature < 1e-2:
        temperature = 1e-2
    top_p = float(top_p)

    formatted_prompt = format_prompt(prompt, history)

    if formatted_prompt in database:
        # Reuse the cached response for an identical prompt.
        response_text = database[formatted_prompt]
    else:
        response = client.text_generation(
            formatted_prompt,
            temperature=temperature,
            max_new_tokens=max_new_tokens,
            top_p=top_p,
            repetition_penalty=repetition_penalty,
            details=True,
            return_full_text=False,
        )
        response_text = response.generated_text
        database[formatted_prompt] = response_text
        save_database(database)  # Save the updated cache
    yield response_text
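
# The cache written by generate() is a flat JSON object keyed by the fully formatted
# prompt, e.g. (illustrative contents of database.json, not real output):
#   {"<s>[INST] What is the secret to life? [/INST]": "42."}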
| css = """ | |
| #mkd { | |
| height: 500px; | |
| overflow: auto; | |
| border: 1px solid #ccc; | |
| } | |
| """ | |

with gr.Blocks(css=css) as demo:
    gr.ChatInterface(
        generate,
        examples=[["What is the secret to life?"], ["Write me a recipe for pancakes."], ["Write a short story about Paris."]],
    )

demo.launch(debug=True)
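
# Quick standalone check of the caching helpers, outside of Gradio (illustrative only):
#   save_database({"<s>[INST] ping [/INST]": "pong"})
#   print(load_database())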