root committed
Commit 712bb5f · 1 Parent(s): 020b4bd

Initial commit: FastAPI application with health endpoints

Files changed (4)
  1. Dockerfile +10 -0
  2. README.md +30 -5
  3. app.py +122 -0
  4. requirements.txt +4 -0
Dockerfile ADDED
@@ -0,0 +1,10 @@
+ FROM python:3.9
+
+ WORKDIR /app
+
+ COPY requirements.txt .
+ RUN pip install --no-cache-dir --upgrade -r requirements.txt
+
+ COPY . .
+
+ CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
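The CMD line starts uvicorn on port 7860, the port Hugging Face Spaces routes traffic to for Docker Spaces by default. For a local smoke test you can build and run the image (for example `docker build -t sheikh-llm .` then `docker run -p 7860:7860 sheikh-llm`; the `sheikh-llm` tag is only illustrative, not part of this commit) and poll the health endpoint until the server answers. A minimal stdlib sketch, assuming the container is reachable on `localhost:7860`:

```python
import json
import time
import urllib.error
import urllib.request

# Assumes the image was built and started locally, e.g.:
#   docker build -t sheikh-llm .        # "sheikh-llm" is an illustrative tag
#   docker run -p 7860:7860 sheikh-llm
HEALTH_URL = "http://localhost:7860/health"

for _ in range(10):
    try:
        with urllib.request.urlopen(HEALTH_URL, timeout=2) as resp:
            print("server is up:", json.load(resp))
            break
    except (urllib.error.URLError, OSError):
        time.sleep(1)  # container still starting; retry
else:
    raise SystemExit("server did not respond on /health within ~10 seconds")
```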
README.md CHANGED
@@ -1,10 +1,35 @@
  ---
- title: Sheikh Llm
- emoji: 📊
- colorFrom: green
- colorTo: red
+ title: Sheikh LLM
+ emoji: 🚀
+ colorFrom: blue
+ colorTo: purple
  sdk: docker
  pinned: false
  ---

- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+ # Sheikh LLM Space
+
+ This is an automated FastAPI application deployed on Hugging Face Spaces.
+
+ ## Features
+ - FastAPI backend with auto-generated docs
+ - Docker deployment
+ - Health monitoring endpoints
+ - Ready for LLM integration
+
+ ## API Documentation
+ Visit `/docs` for interactive Swagger UI documentation.
+
+ ## Endpoints
+ - `GET /` - Homepage with UI
+ - `GET /health` - Health check
+ - `GET /api/status` - API status
+ - `POST /api/chat` - Chat endpoint
+
+ ## Local Development
+ ```bash
+ git clone git@hf.co:spaces/RecentCoders/sheikh-llm
+ cd sheikh-llm
+ pip install -r requirements.txt
+ uvicorn app:app --reload --port 7860
+ ```
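The new README lists a `POST /api/chat` endpoint alongside the GET routes. As a sketch of how a client might call it once the Space is live, the snippet below posts a JSON message with Python's standard library; the base URL is the one used in the curl example inside app.py, so adjust it if your Space is hosted elsewhere:

```python
import json
import urllib.request

# Base URL taken from the curl example in app.py; change it for a different Space.
BASE_URL = "https://recentcoders-sheikh-llm.hf.space"

payload = json.dumps({"message": "Hello from a client script", "max_tokens": 50}).encode("utf-8")
request = urllib.request.Request(
    f"{BASE_URL}/api/chat",
    data=payload,
    headers={"Content-Type": "application/json"},
    method="POST",
)
with urllib.request.urlopen(request) as response:
    print(json.load(response))  # expected shape: {"response": "...", "status": "success"}
```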
app.py ADDED
@@ -0,0 +1,122 @@
+ from fastapi import FastAPI, HTTPException
+ from fastapi.responses import HTMLResponse, JSONResponse
+ from pydantic import BaseModel
+ import os
+
+ app = FastAPI(
+     title="Sheikh LLM API",
+     description="A powerful LLM API deployed on Hugging Face Spaces",
+     version="1.0.0"
+ )
+
+ class ChatRequest(BaseModel):
+     message: str
+     max_tokens: int = 100
+
+ class ChatResponse(BaseModel):
+     response: str
+     status: str
+
+ @app.get("/", response_class=HTMLResponse)
+ def home(): return """
+ <!DOCTYPE html>
+ <html>
+ <head>
+     <title>Sheikh LLM</title>
+     <style>
+         body {
+             font-family: Arial, sans-serif;
+             margin: 0;
+             padding: 20px;
+             background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
+             color: white;
+         }
+         .container {
+             max-width: 800px;
+             margin: 0 auto;
+             background: rgba(255,255,255,0.1);
+             padding: 30px;
+             border-radius: 15px;
+             backdrop-filter: blur(10px);
+         }
+         .header {
+             text-align: center;
+             margin-bottom: 30px;
+         }
+         .endpoints {
+             background: rgba(255,255,255,0.2);
+             padding: 20px;
+             border-radius: 10px;
+             margin: 20px 0;
+         }
+         a { color: #ffd700; text-decoration: none; }
+         a:hover { text-decoration: underline; }
+     </style>
+ </head>
+ <body>
+     <div class="container">
+         <div class="header">
+             <h1>🚀 Sheikh LLM Space</h1>
+             <p>Welcome to your automated Hugging Face Space!</p>
+         </div>
+
+         <div class="endpoints">
+             <h2>📡 API Endpoints:</h2>
+             <ul>
+                 <li><a href="/health" target="_blank">GET /health</a> - Health check</li>
+                 <li><a href="/api/status" target="_blank">GET /api/status</a> - API status</li>
+                 <li><a href="/docs" target="_blank">GET /docs</a> - Interactive API documentation</li>
+             </ul>
+         </div>
+
+         <div class="endpoints">
+             <h2>⚡ Quick Test:</h2>
+             <p>Try this curl command to test the API:</p>
+             <code style="background: black; padding: 10px; display: block; border-radius: 5px;">
+                 curl -X GET "https://recentcoders-sheikh-llm.hf.space/health"
+             </code>
+         </div>
+     </div>
+ </body>
+ </html>
+ """
+
+ @app.get("/health")
+ async def health_check():
+     return JSONResponse({
+         "status": "healthy",
+         "service": "sheikh-llm",
+         "version": "1.0.0",
+         "environment": "production"
+     })
+
+ @app.get("/api/status")
+ async def api_status():
+     return {
+         "service": "sheikh-llm",
+         "version": "1.0.0",
+         "status": "running",
+         "endpoints": [
+             {"path": "/", "method": "GET", "description": "Homepage"},
+             {"path": "/health", "method": "GET", "description": "Health check"},
+             {"path": "/api/status", "method": "GET", "description": "API status"},
+             {"path": "/docs", "method": "GET", "description": "Swagger UI"}
+         ]
+     }
+
+ @app.post("/api/chat", response_model=ChatResponse)
+ async def chat_endpoint(request: ChatRequest):
+     """Simple chat endpoint that echoes the message"""
+     if not request.message.strip():
+         raise HTTPException(status_code=400, detail="Message cannot be empty")
+
+     response_text = f"Received your message: '{request.message}'. This is from Sheikh LLM API!"
+
+     return ChatResponse(
+         response=response_text,
+         status="success"
+     )
+
+ if __name__ == "__main__":
+     import uvicorn
+     uvicorn.run(app, host="0.0.0.0", port=7860)
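Because `/api/chat` only echoes the message and `/health` returns a fixed payload, the app is easy to exercise in-process with FastAPI's `TestClient` instead of a running server. A minimal sketch, assuming `httpx` is installed (the TestClient depends on it, and it is not listed in requirements.txt); the tests can be run with pytest, which is likewise not pinned in this commit:

```python
# Requires: pip install httpx  (TestClient's HTTP layer; not in requirements.txt)
from fastapi.testclient import TestClient

from app import app

client = TestClient(app)

def test_health():
    resp = client.get("/health")
    assert resp.status_code == 200
    assert resp.json()["status"] == "healthy"

def test_chat_echoes_message():
    resp = client.post("/api/chat", json={"message": "ping"})
    assert resp.status_code == 200
    body = resp.json()
    assert body["status"] == "success"
    assert "ping" in body["response"]

def test_chat_rejects_blank_message():
    resp = client.post("/api/chat", json={"message": "   "})
    assert resp.status_code == 400
```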
requirements.txt ADDED
@@ -0,0 +1,4 @@
+ fastapi==0.104.1
+ uvicorn[standard]==0.24.0
+ pydantic==2.5.0
+ python-multipart==0.0.6