import asyncio
import os
from enum import Enum

from fastapi import WebSocket
from openai import OpenAI

# SECURITY: API keys were previously hardcoded here and committed to source.
# They must be treated as leaked and rotated. Keys are now read from the
# environment so secrets never live in the repository.
client = OpenAI(
    api_key=os.getenv("DEEPSEEK_API_KEY", ""),
    base_url="https://api.deepseek.com",
)
# NOTE: base_url must be the API *base* ("/v1"), not a full endpoint path —
# the SDK appends "/chat/completions" itself. The previous value
# "https://api.openai.com/v1/chat/completions" would produce broken URLs.
client_qw = OpenAI(
    api_key=os.getenv("OPENAI_API_KEY", ""),
    base_url="https://api.openai.com/v1",
)
client_gpt = OpenAI(
    api_key=os.getenv("LAOZHANG_API_KEY", ""),
    base_url="https://api.laozhang.ai/v1",
)


class AiApiConstants(Enum):
    """String constants for AI model identifiers and API endpoints used in this module."""

    # DeepSeek model identifiers (reasoning model and chat model).
    DEEPSEEK_R1 = "deepseek-reasoner"
    DEEPSEEK_V3 = "deepseek-chat"
    # Service base URLs.
    DEEPSEEK_API_URL = "https://api.deepseek.com"
    API_ENDPOINT = "https://api.example.com"


async def invoke_openai_only_result(query: str, ws: WebSocket = None):
    """Return the complete (non-streaming) DeepSeek-R1 answer for *query*.

    Args:
        query: The user's prompt, sent as a single user message.
        ws: Unused; kept only for signature compatibility with the
            streaming variants.

    Returns:
        The full assistant message content as a string.
    """
    messages = [{"role": "user", "content": query}]
    # Use the shared enum constant instead of a hardcoded model id so the
    # model name is defined in exactly one place (consistent with invoke_openai).
    response = client.chat.completions.create(
        model=AiApiConstants.DEEPSEEK_R1.value,
        messages=messages,
    )
    # Non-streaming call: the whole answer is available in one response.
    return response.choices[0].message.content


async def invoke_openai(query: str, ws: WebSocket):
    """Stream a DeepSeek-V3 chat completion to the client over *ws*.

    Each content delta is forwarded to the WebSocket as soon as it arrives.
    Errors are reported to stdout rather than raised, so a failed upstream
    call does not tear down the caller's socket loop.

    Args:
        query: The user's prompt, sent as a single user message.
        ws: An open FastAPI WebSocket to stream text chunks to.
    """
    messages = [{"role": "user", "content": query}]
    response = client.chat.completions.create(
        model=AiApiConstants.DEEPSEEK_V3.value,
        messages=messages,
        stream=True,
    )
    # Process the streamed chunks in real time.
    try:
        for chunk in response:
            # Guard against keep-alive/empty chunks with no choices before
            # indexing, so they don't trip the broad except below.
            if chunk.choices and chunk.choices[0].delta.content:
                await ws.send_text(chunk.choices[0].delta.content)
                # Brief pause smooths client-side rendering of the stream.
                await asyncio.sleep(0.03)
    except Exception as e:
        print(f"\033[31mError:\033[0m {str(e)}")


async def invoke_laozhang_api(query: str, ws: WebSocket, model: str = "gpt-4o-mini"):
    """Stream a chat completion from the laozhang.ai gateway to *ws*.

    Args:
        query: The user's prompt.
        ws: An open FastAPI WebSocket to stream text chunks to.
        model: Model identifier to request; defaults to "gpt-4o-mini".
    """
    completion = client_gpt.chat.completions.create(
        model=model,
        stream=True,
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": query}
        ]
    )
    try:
        for chunk in completion:
            # Guard against empty-choices chunks before indexing.
            # (A leftover debug print of the raw delta was removed here.)
            if chunk.choices and chunk.choices[0].delta.content:
                await ws.send_text(chunk.choices[0].delta.content)
                # Brief pause smooths client-side rendering of the stream.
                await asyncio.sleep(0.03)
    except Exception as e:
        print(f"\033[31mError:\033[0m {str(e)}")


async def invoke_laozhangapi_only_result(query: str, model: str = "gpt-4o-mini"):
    """Return the complete (non-streaming) answer from the laozhang.ai gateway.

    Args:
        query: The user's prompt, sent as a single user message.
        model: Model identifier to request; defaults to "gpt-4o-mini".

    Returns:
        The full assistant message content as a string.
    """
    messages = [{"role": "user", "content": query}]
    # Non-streaming call: the whole answer is available in one response.
    response = client_gpt.chat.completions.create(model=model, messages=messages)
    return response.choices[0].message.content
