# NOTE(review): the three lines below were scrape residue from the
# HuggingFace Spaces status page, preserved here as a comment so the
# module remains valid Python:
#   Spaces: Sleeping / Sleeping
import os
import random

from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles
from fastapi.responses import HTMLResponse
from fastapi.templating import Jinja2Templates
from gradio import Blocks, ChatInterface, mount_gradio_app
import uvicorn

from api.endpoints import router as api_router
from api.openrouter import OpenRouterClient

# Initialize the FastAPI application.
app = FastAPI(
    title="OpenRouter AI Hub",
    description="Access multiple AI models via OpenRouter",
)

# Mount the project's REST endpoints under a versioned prefix.
app.include_router(api_router, prefix="/api/v1")

# Serve static assets from ./static at /static.
app.mount("/static", StaticFiles(directory="static"), name="static")

# Jinja2 templates for server-rendered pages (./templates).
templates = Jinja2Templates(directory="templates")

# OpenRouter API client. Falls back to an empty key when the
# OPENROUTER_API_KEY env var is unset; requests will then fail auth
# at call time rather than at import time.
openrouter_client = OpenRouterClient(
    api_key=os.getenv("OPENROUTER_API_KEY", "")
)
| # Web interface with Gradio | |
| def chat_with_model(message, history): | |
| # Select a model based on message or randomly | |
| if "code" in message.lower(): | |
| model = "openai/gpt-3.5-turbo" # Better for coding | |
| elif "creative" in message.lower(): | |
| model = "anthropic/claude-2" # Better for creative writing | |
| else: | |
| models = [ | |
| "openai/gpt-3.5-turbo", | |
| "anthropic/claude-instant-v1", | |
| "google/palm-2-chat-bison", | |
| "meta-llama/llama-2-13b-chat", | |
| "mistralai/mistral-7b-instruct" | |
| ] | |
| model = random.choice(models) | |
| # Get response from OpenRouter | |
| response = openrouter_client.chat_completion( | |
| model=model, | |
| messages=[{"role": "user", "content": message}] | |
| ) | |
| return response["choices"][0]["message"]["content"] | |
# Create the Gradio chat UI.
with Blocks() as demo:
    ChatInterface(
        chat_with_model,
        title="OpenRouter AI Chat",
        description="Chat with multiple AI models powered by OpenRouter",
        examples=[
            "Explain quantum computing in simple terms",
            "Write a Python script to calculate Fibonacci sequence",
            "Tell me a creative story about a robot learning to love"
        ],
        css=".gradio-container {max-width: 800px; margin: auto;}"
    )

# Mount the Gradio app onto FastAPI at "/": the chat UI becomes the
# landing page and shadows any plain FastAPI route registered at "/".
app = mount_gradio_app(app, demo, path="/")
# Root endpoint.
# NOTE(review): this coroutine is never registered with a route
# decorator (e.g. @app.get("/home")), so FastAPI never serves it — and
# "/" is already taken by the Gradio mount above. Additionally,
# Jinja2Templates.TemplateResponse expects a real starlette Request
# object under the "request" context key, not an empty dict; confirm
# intent before wiring this route up.
async def read_root():
    return templates.TemplateResponse("index.html", {"request": {}})
if __name__ == "__main__":
    # Port 7860 is the conventional HuggingFace Spaces serving port;
    # bind to all interfaces so the container is reachable.
    uvicorn.run(app, host="0.0.0.0", port=7860)