# Import required packages
import os

import requests
import gradio as gr

# Read the Groq API key from the environment
groq_api_key = os.getenv("GROQ_API_KEY")

# Define the URL for the Groq API endpoint
url = "https://api.groq.com/openai/v1/chat/completions"

# Set the headers for the API request
headers = {
    "Authorization": f"Bearer {groq_api_key}"
}

# Function to interact with the Groq API
def chat_with_groq(user_input):
    body = {
        "model": "llama-3.1-8b-instant",
        "messages": [
            {"role": "user", "content": user_input}
        ]
    }

    response = requests.post(url, headers=headers, json=body)

    if response.status_code == 200:
        return response.json()["choices"][0]["message"]["content"]
    else:
        return f"Error: {response.json()}"

# Create the Gradio interface
interface = gr.Interface(
    fn=chat_with_groq,
    inputs=gr.Textbox(lines=2, placeholder="Ask me anything..."),
    outputs=gr.Textbox(lines=20),
    title="Cohort 7 1st deployed app",
    description="Type your question below and get a response powered by Groq's Llama 3.1-8B model.",
)

# Launch the Gradio app
interface.launch()