"""
Copyright (c) 2025
Licensed under the MIT License.
See LICENSE file in the project root for full license information.

AI Chat Server - 基于天翼云息壤一体化智算服务平台的大模型对话应用
"""
import json
import requests
import asyncio
from enum import Enum
from typing import List, Dict
from sseclient import SSEClient
from fastapi import FastAPI, WebSocket, WebSocketDisconnect
from fastapi.responses import HTMLResponse
import uvicorn
from fastapi.middleware.cors import CORSMiddleware
import time
import aiohttp
import os
from dotenv import load_dotenv

# Load environment variables from a .env file, if one is present.
load_dotenv()

# Read the CTYUN API key from the environment and fail fast when it is
# missing, so the server never starts with an unusable credential.
API_KEY = os.getenv("CTYUN_API_KEY")
if not API_KEY:
    raise ValueError("请设置环境变量 CTYUN_API_KEY")

class Model(Enum):
    """Enumeration of the LLM models available on the platform.

    Each member's value is a ``(display_name, model_id)`` pair: a
    human-readable label for UI output and the platform-assigned
    identifier sent in API requests.
    """
    DEEPSEEK_R1 = ("DeepSeek-R1-英伟达版", "7ba7726dad4c4ea4ab7f39c7741aea68")
    DEEPSEEK_V3 = ("DeepSeek-V3-昇腾版", "9dc913a037774fc0b248376905c85da5")
    QWEN_72B = ("Qwen2.5-72B-Instruct", "d9df728b30a346afb74d2099b6c209aa")
    BAICHUAN2 = ("Baichuan2-Turbo", "43ac83747cb34730a00b7cfe590c89ac")
    TELECHAT = ("TeleChat-12B", "fdc31b36028043c48b15131885b148ce")

    def __init__(self, display_name: str, model_id: str):
        # Unpack the member's value tuple onto named attributes.
        self.display_name, self.model_id = display_name, model_id


class LLMClient:
    """
    Async client wrapper for the CTYUN (China Telecom Cloud) LLM API.

    Note: an official API key is required.
    Application page: https://huiju.ctyun.cn/service/serviceGroup?regionId=200000001852

    Usage:
    ```python
    client = LLMClient(api_key="your_api_key")
    async for response in client.stream(messages=[{"role": "user", "content": "你好"}]):
        print(response)
    ```
    """
    def __init__(self, api_key: str = API_KEY, model: Model = Model.DEEPSEEK_R1):
        # Bearer-token headers expected by the chat-completions endpoint.
        self.headers = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {api_key}"
        }
        self.url = "https://wishub-x1.ctyun.cn/v1/chat/completions"
        self.model = model
        # aiohttp session, created lazily on first use and reused afterwards.
        self.session = None

    async def stream(self, messages: List[Dict], message_id: str):
        """Stream chat-completion deltas for *messages*.

        Yields dicts of the form
        ``{"content": str, "messageId": message_id, "type": "stream"}``.
        HTTP failures and exceptions are reported as a single
        ``"error"`` item instead of raising, so callers can forward
        them to the end user.
        """
        if not self.session:
            self.session = aiohttp.ClientSession()

        try:
            async with self.session.post(
                    self.url,
                    headers=self.headers,
                    json={
                        "model": self.model.model_id,
                        "messages": messages,
                        "stream": True,
                        "temperature": 0.7,
                        "max_tokens": 2000
                    }
            ) as response:
                if response.status != 200:
                    yield {
                        "content": f"API请求失败: HTTP {response.status}",
                        "messageId": message_id,
                        "type": "error"
                    }
                    return

                # The endpoint speaks Server-Sent Events: each payload line
                # is prefixed with "data:" and carries one JSON chunk.
                async for raw_line in response.content:
                    line = raw_line.decode('utf-8')
                    if not line.startswith('data:'):
                        continue
                    json_str = line[5:].strip()
                    if json_str == '[DONE]':
                        # Terminal SSE marker — nothing meaningful follows.
                        break
                    try:
                        data_json = json.loads(json_str)
                    except json.JSONDecodeError:
                        # Skip keep-alives / frames we cannot parse.
                        continue
                    if "choices" in data_json and data_json["choices"]:
                        delta = data_json["choices"][0].get("delta", {})
                        content = delta.get("content", "")
                        if content:
                            yield {
                                "content": content,
                                "messageId": message_id,
                                "type": "stream"
                            }
        except Exception as e:
            yield {
                "content": f"API请求异常: {str(e)}",
                "messageId": message_id,
                "type": "error"
            }

    async def __aenter__(self):
        """Enter async context; reuse an existing session instead of
        leaking it by unconditionally creating a new one."""
        if not self.session:
            self.session = aiohttp.ClientSession()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Close the underlying session and allow the client to be reused."""
        if self.session:
            await self.session.close()
            self.session = None


# 从templates包中导入HTML模板
from templates.html import HTML_TEMPLATE

# FastAPI application instance with OpenAPI metadata.
app = FastAPI(
    title="AI Chat Server",
    description="基于天翼云息壤一体化智算服务平台的大模型对话应用",
    version="1.0.0"
)

# Permissive CORS so the chat page (or any front-end origin) can reach
# the API; methods, headers and credentials are all allowed.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


# One long-lived client per supported model, keyed by the model name
# the front-end sends over the websocket.
llm_clients = {
    'DEEPSEEK_R1': LLMClient(model=Model.DEEPSEEK_R1),
    'DEEPSEEK_V3': LLMClient(model=Model.DEEPSEEK_V3),
    'QWEN_72B': LLMClient(model=Model.QWEN_72B),
    'BAICHUAN2': LLMClient(model=Model.BAICHUAN2),
    'TELECHAT': LLMClient(model=Model.TELECHAT)
}


@app.get("/", response_class=HTMLResponse)
async def get_chat_page():
    """Serve the embedded single-page chat UI."""
    return HTML_TEMPLATE

@app.websocket("/ws/chat")
async def websocket_endpoint(websocket: WebSocket):
    """Websocket chat endpoint.

    Protocol: the client sends JSON objects with keys ``message``
    (user text), ``model`` (key into ``llm_clients``) and an optional
    ``messageId``. The server replies with a ``thinking`` event, a
    sequence of ``stream`` chunks, and a final ``done`` event carrying
    the elapsed time; failures are reported as ``error`` events.
    """
    await websocket.accept()
    print("WebSocket 连接已建立")

    async def handle_model_response(client, messages, message_id, model_name):
        # Relay streamed chunks to the browser, then report timing metrics.
        start_time = time.time()
        try:
            async for content in client.stream(messages, message_id):
                await websocket.send_json(content)
                # Brief yield keeps the event loop responsive between sends.
                await asyncio.sleep(0.01)

            elapsed_time = time.time() - start_time
            await websocket.send_json({
                "type": "done",
                "messageId": message_id,
                "metrics": {"time": elapsed_time}
            })
        except Exception as e:
            await websocket.send_json({
                "content": f"{model_name} 处理失败: {str(e)}",
                "messageId": message_id,
                "type": "error"
            })

    try:
        while True:
            data = await websocket.receive_json()
            message_id = data.get("messageId", "default")

            # Validate the payload instead of letting a KeyError escape the
            # loop and terminate the whole connection on one bad message.
            if "message" not in data or "model" not in data:
                await websocket.send_json({
                    "content": "请求缺少 message 或 model 字段",
                    "messageId": message_id,
                    "type": "error"
                })
                continue

            messages = [{"role": "user", "content": data["message"]}]
            model_name = data["model"]

            client = llm_clients.get(model_name)
            if client is None:
                await websocket.send_json({
                    "content": f"无效的模型: {model_name}",
                    "messageId": message_id,
                    "type": "error"
                })
                continue

            await websocket.send_json({
                "type": "thinking",
                "messageId": message_id
            })
            await handle_model_response(client, messages, message_id, model_name)

    except WebSocketDisconnect:
        print("WebSocket 连接已关闭")
    except Exception as e:
        print(f"WebSocket 错误: {str(e)}")


if __name__ == "__main__":
    import logging

    logging.basicConfig(level=logging.INFO)

    # Bind address and port are configurable via HOST/PORT env vars.
    host = os.getenv("HOST", "0.0.0.0")
    port = int(os.getenv("PORT", "12000"))

    # Startup banner listing every model the server can dispatch to.
    print("=== AI Chat Server ===")
    print("支持的模型:")
    for entry in Model:
        print(f"- {entry.display_name}")
    print(f"\n服务器启动于 http://{host}:{port}")

    uvicorn.run(app, host=host, port=port, log_level="info")