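"""Luna Chat API: FastAPI service wrapping ModelHandler.

Endpoints:
  POST /chat          - generate a model response for a message history
  POST /export        - save a chat history to disk
  POST /import        - load a previously saved chat history
  GET  /model/status  - report whether the model is loaded and its hash
"""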
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from typing import List, Optional
from model_handler import ModelHandler
import uvicorn

app = FastAPI(title="Luna Chat API")
model_handler = ModelHandler()

class Message(BaseModel):
    role: str
    content: str

class ChatRequest(BaseModel):
    messages: List[Message]
    temperature: Optional[float] = 0.7

    class Config:
        schema_extra = {
            "example": {
                "messages": [{"role": "user", "content": "Hello"}],
                "temperature": 0.7
            }
        }

    def validate_temperature(self):
        if self.temperature is None or not 0.1 <= self.temperature <= 2.0:
            raise ValueError("Temperature must be between 0.1 and 2.0")

class ChatResponse(BaseModel):
    response: str
    history: List[Message]

class ExportRequest(BaseModel):
    history: List[Message]
    filename: Optional[str] = None

class ImportRequest(BaseModel):
    filepath: str

@app.post("/chat", response_model=ChatResponse)
async def chat(request: ChatRequest):
    try:
        request.validate_temperature()
        response = model_handler.generate_response(
            [msg.dict() for msg in request.messages],
            temperature=request.temperature
        )

        # generate_response returns the full message history; take the most
        # recent assistant message as the reply and echo the history back
        assistant_response = next(
            (msg["content"] for msg in reversed(response) if msg["role"] == "assistant"), ""
        )
        history = [Message(**msg) for msg in response]
        return ChatResponse(response=assistant_response, history=history)
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/export")
async def export_chat(request: ExportRequest):
    try:
        filepath = model_handler.save_chat_history(
            [msg.dict() for msg in request.history],
            request.filename
        )
        return {"status": "success", "filepath": filepath}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/import")
async def import_chat(request: ImportRequest):
    try:
        history = model_handler.load_chat_history(request.filepath)
        return {"status": "success", "history": history}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/model/status")
async def model_status():
    return {
        "status": "loaded" if model_handler.model is not None else "not_loaded",
        "model_hash": model_handler.get_model_hash()
    }
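
# Illustrative request against the /chat endpoint (assumes the server is
# running locally on the port used below; payload follows ChatRequest):
#
#   curl -X POST http://localhost:8000/chat \
#        -H "Content-Type: application/json" \
#        -d '{"messages": [{"role": "user", "content": "Hello"}], "temperature": 0.7}'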

if __name__ == "__main__":
    # uvicorn's reload option requires an import string (e.g. "main:app"),
    # so the app instance is run here without it.
    uvicorn.run(app, host="0.0.0.0", port=8000)