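The complete demo fits in a single `app.py`, shown below in three parts. It starts by pulling the three DeepSeek demos from Hugging Face Spaces with `gr.load`, wrapped in a helper that returns `None` when a Space cannot be reached so the rest of the app can still start: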
```python
import gradio as gr

# Load a hosted demo from Hugging Face Spaces. Returning None on failure
# lets the app start even if a Space is unreachable.
def load_model_from_space(model_name):
    print(f"Attempting to load {model_name}...")
    try:
        demo = gr.load(name=model_name, src="spaces")
        print(f"Successfully loaded {model_name}")
        return demo
    except Exception as e:
        print(f"Error loading model {model_name}: {e}")
        return None

# Load the models
deepseek_r1_distill = load_model_from_space("deepseek-ai/DeepSeek-R1-Distill-Qwen-32B")
deepseek_r1 = load_model_from_space("deepseek-ai/DeepSeek-R1")
deepseek_r1_zero = load_model_from_space("deepseek-ai/DeepSeek-R1-Zero")
```
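The chat callback routes each message to whichever loaded demo matches the user's selection, forwards the generation parameters, and falls back to an explanatory message whenever the model is unavailable or returns something unexpected: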
```python
# --- Chatbot function ---
def chatbot(input_text, history, model_choice, system_message, max_new_tokens, temperature, top_p):
    history = history or []
    print(f"Input: {input_text}, History: {history}, Model: {model_choice}")

    # Choose the model based on the user's selection
    if model_choice == "DeepSeek-R1-Distill-Qwen-32B" and deepseek_r1_distill:
        model_demo = deepseek_r1_distill
    elif model_choice == "DeepSeek-R1" and deepseek_r1:
        model_demo = deepseek_r1
    elif model_choice == "DeepSeek-R1-Zero" and deepseek_r1_zero:
        model_demo = deepseek_r1_zero
    else:
        default_response = "Model not selected or could not be loaded."
        history.append((input_text, default_response))
        return history, history, "", model_choice, system_message, max_new_tokens, temperature, top_p

    # Call the loaded Space like a function; fn_index=0 targets its first endpoint.
    try:
        model_output = model_demo(input_text, history, max_new_tokens, temperature, top_p, system_message, fn_index=0)
    except Exception as e:
        print(f"An error occurred: {e}")
        model_output = "An error occurred. Please check the model and try again."
        history.append((input_text, model_output))
        return history, history, "", model_choice, system_message, max_new_tokens, temperature, top_p

    # Check that model_output is iterable and has the expected shape
    if not isinstance(model_output, (list, tuple)) or len(model_output) < 2:
        error_message = "Model output does not have the expected format."
        history.append((input_text, error_message))
        return history, history, "", model_choice, system_message, max_new_tokens, temperature, top_p

    response = model_output[-1][1] if model_output[-1][1] else "Model did not return a response."
    history.append((input_text, response))
    return history, history, "", model_choice, system_message, max_new_tokens, temperature, top_p
```
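With the callback in place, the Blocks interface lays out the chat window, the model selector, and the optional generation parameters, and wires both the Submit button and the Enter key to the same handler: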
```python
# --- Gradio Interface ---
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown(
        """
        # DeepSeek Chatbot
        Created by [ruslanmv.com](https://ruslanmv.com/)

        This is a demo of different DeepSeek models. Select a model, type your message, and click "Submit".
        You can also adjust optional parameters like system message, max new tokens, temperature, and top-p.
        """
    )
    with gr.Row():
        with gr.Column():
            chatbot_output = gr.Chatbot(label="DeepSeek Chatbot", height=500)
            msg = gr.Textbox(label="Your Message", placeholder="Type your message here...")
            with gr.Row():
                submit_btn = gr.Button("Submit", variant="primary")
                clear_btn = gr.ClearButton([msg, chatbot_output])

    # Options placed below the chat interface
    with gr.Row():
        with gr.Accordion("Options", open=True):
            model_choice = gr.Radio(
                choices=["DeepSeek-R1-Distill-Qwen-32B", "DeepSeek-R1", "DeepSeek-R1-Zero"],
                label="Choose a Model",
                value="DeepSeek-R1",
            )
            with gr.Accordion("Optional Parameters", open=False):
                system_message = gr.Textbox(
                    label="System Message",
                    value="You are a friendly Chatbot created by ruslanmv.com",
                    lines=2,
                )
                max_new_tokens = gr.Slider(
                    minimum=1, maximum=4000, value=200, label="Max New Tokens"
                )
                temperature = gr.Slider(
                    minimum=0.10, maximum=4.00, value=0.70, label="Temperature"
                )
                top_p = gr.Slider(
                    minimum=0.10, maximum=1.00, value=0.90, label="Top-p (nucleus sampling)"
                )

    # Maintain chat history across turns
    chat_history = gr.State([])

    # Event handling: the button and the Enter key share the same callback
    submit_btn.click(
        chatbot,
        [msg, chat_history, model_choice, system_message, max_new_tokens, temperature, top_p],
        [chatbot_output, chat_history, msg, model_choice, system_message, max_new_tokens, temperature, top_p],
    )
    msg.submit(
        chatbot,
        [msg, chat_history, model_choice, system_message, max_new_tokens, temperature, top_p],
        [chatbot_output, chat_history, msg, model_choice, system_message, max_new_tokens, temperature, top_p],
    )

# Launch the demo
if __name__ == "__main__":
    demo.launch()
```
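Save the script as `app.py` and start it with `python app.py`; Gradio serves the interface on port 7860 by default. If you need a temporary public URL, for instance when testing from a notebook or a remote machine, `launch()` accepts a few standard options. A minimal sketch:

```python
# Standard Gradio launch() options: share=True requests a temporary public
# gradio.live URL; server_name/server_port set the bind address and port.
demo.launch(share=True, server_name="0.0.0.0", server_port=7860)
```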