from enum import Enum

from langchain_core.documents import Document
from loguru import logger
from pydantic import BaseModel
from typing import Optional, Dict, List, Any
from requests import Request

from ai_engine.common.ai_common import trace_context
from ai_engine.load_config import SysConfig


class LlmApiType(Enum):
    """Supported LLM API provider types."""
    OPENAI = "openai"                # OpenAI-compatible API
    VOLCENGINEARK = "volcengineArk"  # Volcengine Ark
    QIANFAN = "qianfan"              # Baidu Qianfan
    OLLAMA = "ollama"                # local Ollama server


class InsertDocumentReq(BaseModel):
    """Request body for a document-insert operation."""
    # Free-form keyword options forwarded with the insert — presumably
    # vector-store/index kwargs; verify against the caller.
    kwargs: Dict
    # Documents to insert.
    docs: List[Document]


class DocBuilder(BaseModel):
    """Callable that converts a raw search hit into a ``Document``."""

    def __call__(self, hit: Dict) -> Document:
        """Map a hit dict (``_source``-shaped, presumably an Elasticsearch
        hit — confirm against the caller) to a Document.

        The hit's ``_source.metadata`` mapping becomes the document
        metadata, and its ``originalText`` entry the page content.
        """
        metadata = hit["_source"]["metadata"]
        return Document(page_content=metadata["originalText"], metadata=metadata)


class ModelKwargs(BaseModel):
    """LLM connection parameters resolved from request headers / config.

    All fields are optional; ``None`` means the value was not supplied.
    """
    # FIX: fields were declared ``str = None``; ``Optional[str]`` states
    # the real contract (None is a legitimate value) without changing
    # runtime behavior for existing callers.
    api_base: Optional[str] = None              # base URL of the model API
    api_type: Optional[str] = None              # one of LlmApiType values
    api_key: Optional[str] = None               # access key / API key
    api_secret_key: Optional[str] = None        # secret paired with api_key
    api_version: Optional[str] = None           # API version string, if any
    model_name: Optional[str] = None            # chat model identifier
    embedding_model_name: Optional[str] = None  # embedding model identifier


class RequestHeader(BaseModel):
    """Per-request LLM connection settings parsed from HTTP headers.

    Header values (``apiType``, ``apiKey``, ``apiSecretKey``, ``apiBase``,
    ``apiVersion``) override the system defaults from :class:`SysConfig`;
    anything missing falls back to the configured provider defaults.
    """
    http_request: Any = None                    # the incoming HTTP request
    model_kwargs: Optional[ModelKwargs] = None  # resolved model parameters
    request_id: Optional[str] = None            # caller-supplied trace id

    def __init__(self, http_request: Request,
                 request_id: str,
                 model_name: str = None,
                 embedding_model_name: str = None,
                 **data: Any):
        """Validate the request and resolve model parameters.

        Args:
            http_request: incoming request; its headers carry the API
                connection overrides. Required.
            request_id: trace id; when given it is bound to the logging
                trace context.
            model_name: chat-model override; defaults to the configured
                provider's ``chat_model``.
            embedding_model_name: embedding-model override; defaults to
                the provider's ``embedding_model``.
            **data: extra pydantic field values.

        Raises:
            ValueError: if ``http_request`` is missing, or ``apiType`` is
                anything other than ``volcengineArk``.
        """
        super().__init__(**data)

        if http_request is None:
            raise ValueError("http_request is required")
        if request_id is not None:
            trace_context.set(request_id)
        self.http_request = http_request
        self.request_id = request_id

        headers = http_request.headers
        apiUrl = str(http_request.url)
        apiType = headers.get("apiType")
        apiKey = headers.get("apiKey")
        apiSecretKey = headers.get("apiSecretKey")
        apiBase = headers.get("apiBase")
        apiVersion = headers.get("apiVersion")

        # NOTE(review): apiKey/apiSecretKey are logged in clear text —
        # consider masking before this ships to production.
        logger.info("-------------------------------------------")
        logger.info("apiUrl: {}", apiUrl)
        logger.info("apiType: {}", apiType)
        logger.info("apiBase: {}", apiBase)
        logger.info("apiKey: {}", apiKey)
        logger.info("apiSecretKey: {}", apiSecretKey)
        logger.info("apiVersion: {}", apiVersion)
        logger.info("model_name: {}", model_name)
        logger.info("embedding_model_name: {}", embedding_model_name)
        logger.info("-------------------------------------------")

        # Only volcengineArk is currently accepted (a missing apiType also
        # fails here, so the "apiType is None" fallback below can never
        # fire — kept, with the other provider branches, for when this
        # whitelist is widened).
        if apiType not in [LlmApiType.VOLCENGINEARK.value]:
            raise ValueError("apiType error,Only supports volcengineArk")

        # Empty provider config; overwritten by the matching branch below.
        model_config = {
            "access_key_id": "",
            "secret_access_key": "",
            "chat_model": "",
            "embedding_model": ""
        }
        if apiType is None:
            apiType = LlmApiType.VOLCENGINEARK.value
        if apiType == LlmApiType.QIANFAN.value:
            model_config = SysConfig.qianfan
        if apiType == LlmApiType.OPENAI.value:
            model_config = SysConfig.openai
        if apiType == LlmApiType.VOLCENGINEARK.value:
            model_config = SysConfig.volcengine_ark

        # Fill in provider-config defaults for anything the caller omitted.
        if apiKey is None:
            apiKey = model_config["access_key_id"]
        if apiSecretKey is None:
            # BUG FIX: the original assigned the secret to apiKey here,
            # clobbering the key and leaving apiSecretKey unset.
            apiSecretKey = model_config["secret_access_key"]
        if model_name is None:
            model_name = model_config["chat_model"]
        if embedding_model_name is None:
            embedding_model_name = model_config["embedding_model"]

        self.model_kwargs = ModelKwargs(api_type=apiType,
                                        api_base=apiBase,
                                        api_key=apiKey,
                                        api_version=apiVersion,
                                        api_secret_key=apiSecretKey,
                                        model_name=model_name,
                                        embedding_model_name=embedding_model_name)

    def get_model_kwargs(self) -> ModelKwargs:
        """Return the model parameters resolved in ``__init__``."""
        return self.model_kwargs
