Juna190825 committed on
Commit
4bdbba8
·
verified ·
1 Parent(s): cd7a4f3

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +78 -0
app.py ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import random

import uvicorn
from fastapi import FastAPI, Request
from fastapi.responses import HTMLResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from gradio import Blocks, ChatInterface, mount_gradio_app

from api.endpoints import router as api_router
from api.openrouter import OpenRouterClient
12
+
13
# Initialize the FastAPI application that fronts both the REST API and the
# Gradio chat UI.
app = FastAPI(title="OpenRouter AI Hub", description="Access multiple AI models via OpenRouter")

# Mount the project's REST endpoints under a versioned prefix.
app.include_router(api_router, prefix="/api/v1")

# Serve static assets from the local ./static directory (must exist at startup,
# otherwise StaticFiles raises).
app.mount("/static", StaticFiles(directory="static"), name="static")

# Jinja2 templates for server-rendered pages (expects a ./templates directory).
templates = Jinja2Templates(directory="templates")

# Shared OpenRouter client.  Falls back to an empty API key when the
# OPENROUTER_API_KEY environment variable is unset — requests will then fail
# with an auth error at call time rather than at startup.
openrouter_client = OpenRouterClient(
    api_key=os.getenv("OPENROUTER_API_KEY", "")
)
29
+
30
# Web interface with Gradio
def chat_with_model(message, history):
    """Route a chat message to an OpenRouter model and return the reply text.

    Parameters
    ----------
    message : str
        The user's latest chat message.
    history : list
        Prior chat turns supplied by Gradio; currently unused — only the
        latest message is forwarded to the model.

    Returns
    -------
    str
        The model's reply, or a readable error string when the OpenRouter
        response carries no choices (previously this raised a bare KeyError).
    """
    # Simple keyword routing; fall back to a random model for general chat.
    if "code" in message.lower():
        model = "openai/gpt-3.5-turbo"  # Better for coding
    elif "creative" in message.lower():
        model = "anthropic/claude-2"  # Better for creative writing
    else:
        models = [
            "openai/gpt-3.5-turbo",
            "anthropic/claude-instant-v1",
            "google/palm-2-chat-bison",
            "meta-llama/llama-2-13b-chat",
            "mistralai/mistral-7b-instruct",
        ]
        model = random.choice(models)

    # Get response from OpenRouter.  NOTE: history is not replayed; only the
    # current message is sent.
    response = openrouter_client.chat_completion(
        model=model,
        messages=[{"role": "user", "content": message}],
    )

    # Robustness fix: on failure OpenRouter returns an error payload without a
    # "choices" key, which made the original indexing crash with KeyError.
    choices = response.get("choices") if isinstance(response, dict) else None
    if not choices:
        error = response.get("error") if isinstance(response, dict) else None
        detail = error.get("message", str(error)) if isinstance(error, dict) else "no choices returned"
        return f"[OpenRouter error: {detail}]"
    return choices[0]["message"]["content"]
54
+
55
# Create the Gradio chat interface.
# NOTE(review): this demo is mounted at path="/" below.  In Starlette a mount
# at "/" matches before any route registered on `app` afterwards, so later
# root routes are normally shadowed — confirm which root page is intended.
with Blocks() as demo:
    ChatInterface(
        chat_with_model,
        title="OpenRouter AI Chat",
        description="Chat with multiple AI models powered by OpenRouter",
        examples=[
            "Explain quantum computing in simple terms",
            "Write a Python script to calculate Fibonacci sequence",
            "Tell me a creative story about a robot learning to love"
        ],
        css=".gradio-container {max-width: 800px; margin: auto;}"
    )

# Mount the Gradio app onto the FastAPI app at the site root.
app = mount_gradio_app(app, demo, path="/")
71
+
72
# Root endpoint.
# NOTE(review): Gradio is mounted at "/" above and mounts are matched before
# routes added afterwards, so this handler is normally shadowed — confirm
# which root page is intended.
@app.get("/", response_class=HTMLResponse)
async def read_root(request: Request):
    """Render the server-side index page from templates/index.html.

    Fix: Jinja2Templates.TemplateResponse requires the actual incoming
    ``Request`` object in the template context; the original passed an empty
    dict, which breaks template rendering (and raises under current Starlette
    versions).  FastAPI injects ``request`` automatically, so the HTTP
    interface is unchanged.
    """
    return templates.TemplateResponse("index.html", {"request": request})


if __name__ == "__main__":
    # 7860 is the conventional Hugging Face Spaces port.
    uvicorn.run(app, host="0.0.0.0", port=7860)