import os.path
from typing import Optional

from fastapi import FastAPI, Query
from pydantic import BaseModel
import uvicorn

from core.knowledgeBase import KnowledgeBase
from core.chat import Chat

app = FastAPI()


class Route(BaseModel):
    """Request body shared by the knowledge-base and chat endpoints.

    Only ``openai_api_key`` is required; each endpoint reads the subset
    of the optional fields it needs.
    """

    # Optional[...] makes the None defaults explicit. A bare ``str = None``
    # is only tolerated (as an implicit Optional) by pydantic v1 and is
    # rejected outright by pydantic v2.
    file_name: Optional[str] = None    # file path for upload / listing
    query: Optional[str] = None        # user question for chat endpoints
    openai_api_key: str                # required OpenAI credential
    page: int = 1                      # 1-based page index for listings
    size: int = 10                     # page size for listings
    template: Optional[str] = None     # prompt template override
    model: Optional[str] = None        # chat model name override
    embeddings: Optional[str] = None   # embeddings model name override


@app.post('/gpt/knowledge_base_upload/{action_type}')
def knowledge_base_upload(action_type: str, item: Route):
    """Apply ``action_type`` (upload/update/etc.) to the knowledge base file.

    NOTE(review): ``embedings`` is the keyword spelling expected by the
    KnowledgeBase constructor — do not "fix" it here alone.
    """
    kb = KnowledgeBase(
        api_key=item.openai_api_key,
        model=item.model,
        embedings=item.embeddings,
    )
    kb.upload(file_path=item.file_name, action_type=action_type)
    return {'code': 200, 'message': '知识库操作成功'}


@app.post('/gpt/get_knowledge_base')
def get_knowledge_base(item: Route):
    """Return a paginated listing of the knowledge base contents."""
    kb = KnowledgeBase(
        api_key=item.openai_api_key,
        model=item.model,
        embedings=item.embeddings,
    )
    listing = kb.get_knowledge_base(
        file_path=item.file_name, page=item.page, size=item.size
    )
    return {'code': 200, 'data': listing}


@app.post('/gpt/knowledge_chat/{uid}')
def knowledge_chat(uid: str, item: Route):
    """Answer ``item.query`` against the knowledge base for session ``uid``."""
    kb = KnowledgeBase(
        api_key=item.openai_api_key,
        model=item.model,
        embedings=item.embeddings,
    )
    answer = kb.invoke(item.query, uid, item.template)
    return {'code': 200, 'message': answer}


# Currently unused.
@app.get('/gpt/chat_log/{uid}')
def knowledge_chat_log(uid: str, page: int = Query(1), size: int = Query(10)):
    """Return the paginated chat history for session ``uid``."""
    return Chat().chat_log(uid, page, size)


@app.get('/gpt/clear_chat_log/{uid}')
def clear_chat_log(uid: str):
    """Delete the full chat history for session ``uid``."""
    Chat().clear_chat_log(uid)
    return {'code': 200, 'message': '成功清空聊天'}


if __name__ == "__main__":
    # Ensure the embedding-cache and vector-store directories exist before
    # serving. exist_ok=True replaces the original check-then-create pair
    # (os.path.exists + os.makedirs), which was racy and more verbose.
    for directory in ('cache_embeddings', 'vector_store/knowledge_base'):
        os.makedirs(directory, exist_ok=True)
    uvicorn.run(app, host='0.0.0.0', port=9999)
