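# Streamlit chat front end for Cohere's Chat API: pick a model in the sidebar,
# enter an API key, and converse with the selected model.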
import streamlit as st
import cohere
import os

st.set_page_config(page_title="Cohere Chat", layout="wide")

AI_PFP = "media/pfps/cohere-pfp.png"
USER_PFP = "media/pfps/user-pfp.jpg"
BANNER = "media/banner.png"

if not os.path.exists(AI_PFP) or not os.path.exists(USER_PFP):
    st.error("Missing profile pictures in media/pfps directory")
    st.stop()
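# Catalog of selectable Cohere models with short descriptions and the
# context-window / max-output sizes as listed by this app.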
model_info = {
    "command-a-03-2025": {
        "description": "Command A is our most performant model to date, excelling at tool use, agents, retrieval augmented generation (RAG), and multilingual use cases.",
        "context": "256K",
        "output": "8K"
    },
    "command-r7b-12-2024": {
        "description": "Small, fast update excelling at RAG, tool use, and complex reasoning tasks.",
        "context": "128K",
        "output": "4K"
    },
    "command-r-plus-04-2024": {
        "description": "Instruction-following model for complex RAG workflows and multi-step tool use.",
        "context": "128K",
        "output": "4K"
    },
    "command-r-plus": {
        "description": "Alias for command-r-plus-04-2024.",
        "context": "128K",
        "output": "4K"
    },
    "command-r-08-2024": {
        "description": "Updated Command R model from August 2024.",
        "context": "128K",
        "output": "4K"
    },
    "command-r-03-2024": {
        "description": "Instruction-following model for code generation, RAG, and agents.",
        "context": "128K",
        "output": "4K"
    },
    "command-r": {
        "description": "Alias for command-r-03-2024.",
        "context": "128K",
        "output": "4K"
    },
    "command": {
        "description": "Conversational model with long context capabilities.",
        "context": "4K",
        "output": "4K"
    },
    "command-nightly": {
        "description": "Experimental nightly build (not for production).",
        "context": "128K",
        "output": "4K"
    },
    "command-light": {
        "description": "Faster lightweight version of command.",
        "context": "4K",
        "output": "4K"
    },
    "command-light-nightly": {
        "description": "Experimental nightly build of command-light.",
        "context": "128K",
        "output": "4K"
    }
}
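# Sidebar: branding, theme toggle, API key entry, model picker, and a card
# describing the currently selected model.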
with st.sidebar:
    st.image(BANNER, use_container_width=True)
    st.title("Settings")
    theme = st.selectbox("Theme", ["Light", "Dark"], index=0)
    if theme == "Dark":
        st.markdown(
            """
            <style>
            /* Streamlit dark mode override */
            .css-18ni7ap, .css-1d391kg, .css-12oz5g7 { background-color: #2e2e2e !important; color: #f0f0f0 !important; }
            body { background-color: #1e1e1e !important; color: #f0f0f0 !important; }
            </style>
            """, unsafe_allow_html=True
        )
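    # NOTE: the hashed .css-* selectors above are generated per Streamlit build,
    # so this dark-mode override is best-effort and may not apply on other versions.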
| st.markdown("# Cohere Labs") | |
| st.markdown("Hugging Face 🤗 Community UI") | |
| st.title("Settings") | |
| api_key = st.text_input("Cohere API Key", type="password") | |
| selected_model = st.selectbox("Model", options=list(model_info.keys())) | |
| st.divider() | |
| st.image(AI_PFP, width=60) | |
| st.subheader(selected_model) | |
| st.markdown(model_info[selected_model]["description"]) | |
| st.caption(f"Context: {model_info[selected_model]['context']}") | |
| st.caption(f"Output: {model_info[selected_model]['output']}") | |
| st.title(f"Chat - {selected_model}") | |
| if "messages" not in st.session_state: | |
| st.session_state.messages = [] | |
| for msg in st.session_state.messages: | |
| with st.chat_message(msg["role"], avatar=USER_PFP if msg["role"] == "user" else AI_PFP): | |
| st.markdown(msg["content"]) | |
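# Handle a new user turn: store it, send the full history to the Cohere v2 Chat API,
# then render and store the assistant reply.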
if prompt := st.chat_input("Message..."):
    if not api_key:
        st.error("API key required")
        st.stop()
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user", avatar=USER_PFP):
        st.markdown(prompt)
    try:
        co = cohere.ClientV2(api_key)
        with st.chat_message("assistant", avatar=AI_PFP):
            response = co.chat(
                model=selected_model,
                messages=st.session_state.messages
            )
            if hasattr(response, "message") and hasattr(response.message, "content"):
                content_items = response.message.content
                reply = "".join(getattr(item, 'text', '') for item in content_items)
            else:
                st.write(response)
                reply = "❗️Couldn't extract reply from the Cohere response."
            st.markdown(reply)
        st.session_state.messages.append({"role": "assistant", "content": reply})
    except Exception as e:
        st.error(f"Error: {str(e)}")