import gradio as gr
import requests
import os
from typing import List, Tuple
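
# Note (assumption): on Hugging Face Spaces this file is expected to ship with a
# requirements.txt listing the non-stdlib dependencies imported above (gradio, requests).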

# Groq API Configuration
API_URL = "https://api.groq.com/openai/v1/chat/completions"
API_KEY = os.getenv("GROQ_API_KEY")  # This will read from HF Secrets

# Debug info (will show in HF Spaces logs)
print(f"🔑 Groq API Key Status: {'✅ Found' if API_KEY else '❌ Missing'}")
if API_KEY:
    print(f"🔑 API Key Preview: {API_KEY[:8]}...")

headers = {
    "Authorization": f"Bearer {API_KEY}",
    "Content-Type": "application/json"
}

# Groq Models
MODELS = {
    "llama3-8b-8192": "Llama 3 8B ⚡ (Fastest)",
    "llama3-70b-8192": "Llama 3 70B 🧠 (Smartest)",
    "mixtral-8x7b-32768": "Mixtral 8x7B ⚖️ (Balanced)",
    "gemma-7b-it": "Gemma 7B 💎 (Google)"
}


def query_groq(message: str, history: List[Tuple[str, str]], model: str) -> str:
    """Query Groq API with comprehensive error handling"""
    # Check API key
    if not API_KEY:
        return """❌ **API Key Not Found**

Please set up your API key in Hugging Face Spaces:
1. Go to your Space's **Settings**
2. Click **Repository secrets**
3. Add new secret:
   - Name: `GROQ_API_KEY`
   - Value: Your Groq API key
4. Restart the Space

Get your free API key at: [console.groq.com](https://console.groq.com)"""

    try:
        # Build conversation history
        messages = [{"role": "system", "content": "You are a helpful AI assistant powered by Groq's lightning-fast inference."}]

        # Add chat history
        for user_msg, bot_msg in history:
            if user_msg and bot_msg:
                messages.extend([
                    {"role": "user", "content": user_msg},
                    {"role": "assistant", "content": bot_msg}
                ])

        # Add current message
        messages.append({"role": "user", "content": message})

        # Make API request
        response = requests.post(
            API_URL,
            headers=headers,
            json={
                "model": model,
                "messages": messages,
                "temperature": 0.7,
                "max_tokens": 2048,
                "stream": False,
                "stop": None
            },
            timeout=30
        )

        # Handle HTTP errors
        if response.status_code == 401:
            return "❌ **Authentication Failed**\nInvalid API key. Please check your GROQ_API_KEY secret."
        elif response.status_code == 403:
            return "❌ **Access Forbidden**\nAPI key doesn't have permission."
        elif response.status_code == 429:
| return "β **Rate Limited**\nToo many requests. Groq has generous limits, this should be rare." | |
        elif response.status_code == 500:
            return "❌ **Server Error**\nGroq server issue. Please try again."
        elif response.status_code == 503:
            return "❌ **Service Unavailable**\nGroq is temporarily down. Try again in a moment."

        response.raise_for_status()

        # Parse response
        result = response.json()
        if "choices" in result and len(result["choices"]) > 0:
            content = result["choices"][0]["message"]["content"]
            return content.strip()
        else:
            return f"❌ **Unexpected Response Format**\n```json\n{result}\n```"

    except requests.exceptions.Timeout:
        return "❌ **Timeout**\nRequest took too long (>30s). Try again."
    except requests.exceptions.ConnectionError:
        return "❌ **Connection Error**\nCannot connect to Groq API. Check internet connection."
    except requests.exceptions.RequestException as e:
        return f"❌ **Request Error**\n{str(e)}"
    except Exception as e:
        return f"❌ **Unexpected Error**\n{str(e)}"


def test_api_connection():
    """Test API connection and return status"""
    if not API_KEY:
        return "🔴 API Key Missing"

    try:
        response = requests.post(
            API_URL,
            headers=headers,
            json={
                "model": "llama3-8b-8192",
                "messages": [{"role": "user", "content": "test"}],
                "max_tokens": 5
            },
            timeout=10
        )
        if response.status_code == 200:
            return "🟢 Connected Successfully"
        else:
            return f"🔴 HTTP {response.status_code}"
    except Exception as e:
        return f"🔴 Connection Failed: {str(e)[:50]}"


def create_gradio_interface():
    # Test connection at startup
    connection_status = test_api_connection()

    # Custom CSS for better UI
    css = """
    .gradio-container {
        max-width: 800px !important;
        margin: auto !important;
    }
    .chat-message {
        padding: 10px !important;
    }
    """

    with gr.Blocks(
        title="⚡ Groq AI Chat",
        theme=gr.themes.Soft(primary_hue="blue"),
        css=css
    ) as demo:

        # Header
        gr.Markdown("""
        # ⚡ Groq AI Chat
        ### Lightning-fast AI responses powered by Groq's LPU™
        """)

        # Status indicator
        with gr.Row():
            gr.Markdown(f"**Connection Status:** {connection_status}")

        # Model selector
        with gr.Row():
            model_dropdown = gr.Dropdown(
                choices=list(MODELS.keys()),
                value="llama3-8b-8192",
                label="🤖 Select AI Model",
                info="Choose the model that best fits your needs"
            )

        # Chat interface
        chatbot = gr.Chatbot(
            label="💬 Chat History",
            height=500,
            bubble_full_width=False,
            show_copy_button=True
        )
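
        # Note: this Chatbot keeps history as [user_message, bot_message] pairs,
        # which is the format respond() below reads from and appends to.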

        # Input area
        with gr.Row():
            msg_textbox = gr.Textbox(
                label="✏️ Your Message",
                placeholder="Ask me anything... Groq responses are incredibly fast!",
                scale=4,
                lines=2,
                max_lines=5
            )
            send_button = gr.Button("Send ⚡", variant="primary", scale=1)

        # Control buttons
        with gr.Row():
            clear_button = gr.Button("🗑️ Clear Chat", variant="secondary")

        # Chat logic
        def respond(message: str, chat_history: List[List[str]], model: str):
            if not message.strip():
                return "", chat_history

            # Convert gradio format to API format
            history_tuples = [(h[0], h[1]) for h in chat_history if len(h) >= 2]

            # Get AI response
            bot_message = query_groq(message.strip(), history_tuples, model)

            # Update chat history
            chat_history.append([message, bot_message])
            return "", chat_history

        # Event handlers
        msg_textbox.submit(
            respond,
            inputs=[msg_textbox, chatbot, model_dropdown],
            outputs=[msg_textbox, chatbot]
        )
        send_button.click(
            respond,
            inputs=[msg_textbox, chatbot, model_dropdown],
            outputs=[msg_textbox, chatbot]
        )
        clear_button.click(
            lambda: [],
            outputs=chatbot
        )

        # Example prompts
        gr.Examples(
            examples=[
                ["Hello! What makes Groq special?"],
                ["Explain quantum computing in simple terms"],
                ["Write a Python function to find prime numbers"],
                ["What's the difference between AI, ML, and Deep Learning?"],
                ["Help me debug this error: 'TypeError: 'str' object is not callable'"],
                ["Create a simple REST API with FastAPI"],
                ["Explain the concept of recursion with examples"],
                ["What are the latest trends in web development?"]
            ],
            inputs=msg_textbox,
            label="💡 Try these examples:"
        )

        # Information accordion
        with gr.Accordion("ℹ️ About Groq & Models", open=False):
            gr.Markdown(f"""
**🚀 Why Groq is Amazing:**
- ⚡ **Fastest AI inference** in the world (500+ tokens/second)
- 🆓 **Generous free tier** with high rate limits
- 🔄 **Real-time responses** feel like magic
- 🎯 **Consistent quality** across all models

**🤖 Available Models:**
{chr(10).join([f"- **{model}**: {desc}" for model, desc in MODELS.items()])}

**🔧 Technical Details:**
- Powered by Groq's custom LPU™ (Language Processing Unit)
- Optimized for transformer model inference
- Supports context windows up to 32K tokens
- Enterprise-grade reliability and security
""")

        # Footer
        gr.Markdown("""
        ---
        **📋 Setup Instructions:**
        1. Get your free API key: [console.groq.com](https://console.groq.com)
        2. Add it to HF Spaces: Settings → Repository secrets → `GROQ_API_KEY`
        3. Restart your Space and enjoy lightning-fast AI!

        **💡 Tips:**
        - Groq excels at coding, explanations, and creative tasks
        - Try different models to see which fits your use case
        - The speed difference is immediately noticeable!
        """)

    return demo


# Create and launch the interface
if __name__ == "__main__":
    print("🚀 Initializing Groq Chat Interface...")
    print(f"🔑 API Key Status: {'✅ Ready' if API_KEY else '❌ Please add GROQ_API_KEY to secrets'}")

    demo = create_gradio_interface()
    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        show_error=True,
        show_api=False,  # Hide API docs for cleaner interface
        favicon_path=None
    )
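
# Running locally (assumption: this file is saved as app.py and gradio/requests are installed):
#   export GROQ_API_KEY=your_key_here
#   python app.py
# The app is then served at http://localhost:7860 (the port set in demo.launch above).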