import os
import json
from biz.base.telchina_chat import Telchina_LocalContext_OpenAI, Telchina_LocalContext_OpenAI_Milvus
from openai import OpenAI
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from pymilvus import model, MilvusClient
from llama_index.llms.openllm import OpenLLM

from biz.data.sql_template.sql_template_rag import SqlTemplateRAG
from cache import MemoryCache
from biz.core.ai.prompts import SQL_ANALYSIS_PROMPT

# Load environment variables from .env, overriding any values already set in the process.
load_dotenv(override=True)


class AuthConfig:
    """Authentication configuration."""

    # Token expected from API clients.
    API_AUTH_TOKEN = "your-secret-token"

    # Master switch for API authentication.
    ENABLE_AUTH = False

    # Whether static assets may be served without authentication.
    # False: static pages also require login before access.
    # True: static pages are public; only API calls need a token.
    ALLOW_STATIC_WITHOUT_AUTH = True

    # Login credentials (simple example; production should use a database).
    LOGIN_CREDENTIALS = {"admin": "admin123"}


class ASRConfig:
    """Speech-recognition (ASR) configuration."""

    # Filesystem path to the local VOSK model.
    VOSK_MODEL_PATH = "D:/06git/01python/12wake_word/wake_word_simple/model/vosk-model-small-cn-0.22"

    # Wake-word phrases.
    WAKE_WORDS = ["休息休息", "小溪小溪", "你好小溪", "小心小心"]

    # Punctuation characters.
    PUNCTUATION = ["，", "。", "？", "！", "、", "；", " "]

    # Audio processing parameters.
    PROCESS_INTERVAL = 10    # maximum collection window (seconds)
    SILENCE_THRESHOLD = 100  # silence amplitude threshold
    SILENCE_DURATION = 3     # silence duration threshold

    # Server bind address and port.
    HOST = "0.0.0.0"
    PORT = 8000


class AIConfig:
    """AI model configuration.

    Reads model and embedding settings from environment variables, builds the
    embedding function and vector-store client (Milvus or Chroma), the shared
    config dict, and the OpenAI/LangChain/llama-index clients. The command/SQL
    contexts are created lazily on first access.
    """

    def __init__(self):
        self.cache = MemoryCache()
        # Backend selector: "milvus" or anything else for the Chroma path.
        self.vector_store_name = os.environ["VECTOR_STORE_NAME"]
        self.embedding_api_key = os.environ.get("EMBEDDING_API_KEY")
        self.embedding_base_url = os.environ.get("EMBEDDING_BASE_URL")
        self.embedding_model = os.environ.get("EMBEDDING_MODEL")

        # Initialize the embedding function, shared config dict and clients.
        self._init_embedding_function()
        self._init_config()
        self._init_clients()

        # Lazily created context singletons (see the get_* accessors below).
        self._command_context = None
        self._sql_context = None
        self._sql_function_context = None

    def _init_embedding_function(self):
        """Create the embedding function and, for Milvus, the Milvus client."""
        self.milvus_client = None
        if self.vector_store_name == "milvus":
            uri = os.environ['MILVUS_CLIENT_URL']
            db_name = os.environ.get('MILVUS_DB_NAME', "")
            self.ollama_ef = model.dense.OpenAIEmbeddingFunction(
                api_key=self.embedding_api_key,
                base_url=self.embedding_base_url,
                model_name=self.embedding_model,
                # NOTE(review): fixed at 768 — confirm this matches EMBEDDING_MODEL's output size.
                dimensions=768
            )
            print("db_name", db_name)
            self.milvus_client = MilvusClient(uri=uri, db_name=db_name)
        else:
            # Chroma path: import lazily so Milvus-only deployments don't need chromadb installed.
            import chromadb.utils.embedding_functions as embedding_functions
            self.ollama_ef = embedding_functions.OpenAIEmbeddingFunction(
                api_key=self.embedding_api_key,
                api_base=self.embedding_base_url,
                model_name=self.embedding_model
            )

    def _init_config(self):
        """Build the shared config dict consumed by the context classes."""
        self.config = {
            "model": os.environ['MODEL'],
            "temperature": float(os.environ['TEMPERATURE']),
            "max_tokens": int(os.environ['MAX_TOKENS']),
            "embedding_function": self.ollama_ef,
            "milvus_client": self.milvus_client,
            "sql_analysis_prompt": SQL_ANALYSIS_PROMPT,
        }

    def _init_clients(self):
        """Initialize the OpenAI, LangChain and llama-index LLM clients."""
        self.client = OpenAI(
            api_key=os.environ['API_KEY'],
            base_url=os.environ['BASE_URL']
        )

        self.llm = ChatOpenAI(
            # Fix: TEMPERATURE arrives from the environment as a string; convert
            # to float here, matching the conversion already done in _init_config.
            temperature=float(os.environ['TEMPERATURE']),
            openai_api_key=os.environ['API_KEY'],
            base_url=os.environ['BASE_URL'],
            stop=["Final Answer"],
            model_name=os.environ['MODEL']
        )

        self.openLLM = OpenLLM(
            model=os.environ['MODEL'],
            is_chat_model=True,
            temperature=float(os.environ['TEMPERATURE']),  # same string -> float fix as above
            api_key=os.environ['API_KEY'],
            api_base=os.environ['BASE_URL'],
            timeout=120
        )

    def get_command_context(self):
        """Return the lazily created command context, choosing the backend by vector store."""
        if self._command_context is None:
            if self.vector_store_name == "milvus":
                self._command_context = Telchina_LocalContext_OpenAI_Milvus(client=self.client, config=self.config)
            else:
                self._command_context = Telchina_LocalContext_OpenAI(client=self.client, config=self.config)
        return self._command_context

    def get_sql_context(self):
        """Return the lazily created SQL context; connects to Postgres on first creation."""
        if self._sql_context is None:
            if self.vector_store_name == "milvus":
                self._sql_context = Telchina_LocalContext_OpenAI_Milvus(client=self.client, config=self.config)
            else:
                self._sql_context = Telchina_LocalContext_OpenAI(client=self.client, config=self.config)
            self._sql_context.connect_to_postgres()
        return self._sql_context

    def get_sql_function_context(self):
        """Return the lazily created SQL template RAG context."""
        if self._sql_function_context is None:
            self._sql_function_context = SqlTemplateRAG(openLLM=self.openLLM, config=self.config)
        return self._sql_function_context
# os.environ["LANGCHAIN_TRACING_V2"] = "true"
# os.environ["LANGCHAIN_API_KEY"] = "<LANGCHAIN_API_KEY redacted - do not commit secrets>"

# os.environ["TRACELOOP_API_KEY"] = "<TRACELOOP_API_KEY redacted - do not commit secrets>"
# from traceloop.sdk import Traceloop
# Traceloop.init()

# from llama_index.core import set_global_handler, global_handler
# from llama_index.core import Settings
# os.environ["OPIK_API_KEY"] = "<OPIK_API_KEY redacted - do not commit secrets>"
# os.environ["OPIK_WORKSPACE"] = "yuklcool"
# set_global_handler("opik")
# opik_callback_handler = global_handler
# Settings.llm = None

# Global configuration instance
ai_config = AIConfig()

# Backward-compatibility aliases for the original module-level globals and functions.
cache = ai_config.cache
config = ai_config.config
client = ai_config.client
llm = ai_config.llm
openLLM = ai_config.openLLM

def get_command_context():
    """Module-level shim: delegate to the global AIConfig instance."""
    return ai_config.get_command_context()

def get_sql_context():
    """Module-level shim: delegate to the global AIConfig instance."""
    return ai_config.get_sql_context()

def get_sql_function_context():
    """Module-level shim: delegate to the global AIConfig instance."""
    return ai_config.get_sql_function_context()

class Router:
    """Singleton that loads page-routing data from a JSON config file.

    The first instantiation reads ``config_file``; subsequent instantiations
    return the same instance and silently ignore the argument.
    """

    _instance = None
    # Routing data parsed from the config file (empty list on load failure).
    page = None

    def __new__(cls, config_file='router_page.json'):
        # Classic singleton: create and configure the instance only once.
        if not cls._instance:
            cls._instance = super().__new__(cls)  # modernized from super(Router, cls)
            cls._instance.load_config(config_file)
        return cls._instance

    def load_config(self, config_file):
        """Read the JSON config file into ``self.page``.

        On any failure the error is reported on stdout and ``self.page`` is
        reset to an empty list.
        """
        try:
            with open(config_file, 'r', encoding='utf-8') as file:
                self.page = json.load(file)
        except FileNotFoundError:
            print(f"配置文件 {config_file} 未找到！")
            self.page = []
        except json.JSONDecodeError:
            print(f"配置文件 {config_file} 格式有误！")
            self.page = []
        except Exception as e:
            # Last-resort boundary handler: report and fall back to an empty route list.
            print(f"读取配置文件时出错: {e}")
            self.page = []