'''
Version: 2.0
Author: Yue Zhong
Date: 2024-12-26 16:08:12
Description: 
LastEditors: Yue Zhong
LastEditTime: 2025-01-03 17:05:30
'''

from pprint import pprint
from copy import deepcopy
from typing import Callable, List, Dict, Optional
from functools import lru_cache
import numpy as np

from langchain.output_parsers import ResponseSchema
from langchain.output_parsers import StructuredOutputParser

from langchain_core.example_selectors.base import BaseExampleSelector
from langchain.prompts import ChatPromptTemplate
from langchain_core.prompts.prompt import PromptTemplate
from langchain_core.prompts import FewShotPromptTemplate

# import sys, os
# sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from .constants import *

# Choose the tokenizer backend at import time, driven by flags from .constants
# (use_open_ai / openai_model_name).
if use_open_ai:
    if openai_model_name == "deepseek-chat":
        # DeepSeek model: use the bundled tokenizer instead of tiktoken.
        from .tokenizer import deep_tokenizer
    else:
        import tiktoken
else:
    # Non-OpenAI path: GLM-4 tokenizer shipped with this package.
    from .tokenizer import glm4_tokenizer
def load_env(env_path=env_path):
    """Load environment variables from *env_path* (defaults to the package-level setting)."""
    from dotenv import load_dotenv

    load_dotenv(dotenv_path=env_path)

def responsellm(prompt, client) -> str:
    """Send *prompt* to *client* and return the text content of its reply."""
    return client.invoke(prompt).content
def generate_prompt_template(prompt_template_str:str):
    """Build a ChatPromptTemplate from a raw template string."""
    template = ChatPromptTemplate.from_template(prompt_template_str)
    return template
def getllm(model_name:str):
    """Look *model_name* up in the LLM_MODELS registry and return a fresh instance."""
    assert model_name in LLM_MODELS, f"{model_name} not in LLM_MODELS"
    factory = LLM_MODELS[model_name]
    return factory()
def set_llms(
    suggest_model: Optional[Callable] = None,
    knowledge_model: Optional[Callable] = None,
    embedding_model: Optional[Callable] = None,
):
    """Override entries of the global LLM_MODELS registry.

    A ``None`` argument leaves the corresponding registry entry untouched.
    """
    global LLM_MODELS
    overrides = {
        "suggest": suggest_model,
        "knowledge": knowledge_model,
        "embedding": embedding_model,
    }
    for key, model in overrides.items():
        if model is not None:
            LLM_MODELS[key] = model

def config2json(client, origin:str, response_schemas:List[ResponseSchema]) -> Dict:
    """Ask *client* to extract structured fields from *origin* and parse the reply.

    The fields to extract are described by *response_schemas*; the LLM reply is
    parsed back into a dict by a StructuredOutputParser.
    """
    parser = StructuredOutputParser.from_response_schemas(response_schemas)
    instructions = parser.get_format_instructions()

    prompt_template = """\
        For the following text, extract the following information: 
        
        text: {text}

        {format_instructions}
        """
    messages = ChatPromptTemplate.from_template(prompt_template).format_messages(
        text=origin, format_instructions=instructions
    )
    raw = responsellm(messages, client)
    # Double backslashes so the structured parser does not choke on escapes.
    escaped = raw.replace('\\', '\\\\')
    return parser.parse(escaped)
@lru_cache(maxsize=2000)
def _token_count(text):
    """Return the token count of *text* under the configured tokenizer (cached)."""
    if not use_open_ai:
        print("For GLM-4, default using glm_4_9b_chat_hf_tokenizer")
        return len(glm4_tokenizer.encode(text))
    if openai_model_name == "deepseek-chat":
        return len(deep_tokenizer.encode(text))
    try:
        encoding = tiktoken.encoding_for_model(openai_model_name)
    except KeyError:
        print("Warning: model not found. Using gpt-2 encoding.")
        encoding = tiktoken.get_encoding("gpt2")
    return len(encoding.encode(text))

def token_count(texts):
    """Token count of a single string, or the total over an iterable of strings."""
    if isinstance(texts, str):
        return _token_count(texts)
    return sum(_token_count(text) for text in texts)

# Register our tokenizer-backed counter as the project-wide token counting
# function (set_token_count_func comes from .constants).
set_token_count_func(token_count)

class CustomExampleSelector(BaseExampleSelector):
    """
    A custom example selector that selects examples based on a custom function.
    Mimics the length-based example selector: fills the remaining token budget
    from the end ("down"), from the start ("up"), or in random order ("random").
    """
    def __init__(self, examples: List[Dict], example_prompt: PromptTemplate, max_length: int = 2048,
                 get_text_length: Callable[[str], int] = get_token_count_func(), method="down"):
        # examples: candidate few-shot examples, each a dict of template variables.
        # example_prompt: template used to render one example for length measurement.
        # max_length: shared budget for the input plus selected examples,
        #             measured in get_text_length units.
        # method: "down" | "up" | "random" selection order.
        self.examples = examples
        self.example_prompt = example_prompt
        self.max_length = max_length
        self.get_text_length = get_text_length
        self.method = method
        self.calculate_text_length()

    def add_example(self, example):
        """Append *example* and cache its rendered length."""
        self.examples.append(example)
        string_example = self.example_prompt.format(**example)
        self.example_text_lengths.append(self.get_text_length(string_example))

    def select_examples(self, input_variables: Dict[str, str]) -> List[Dict]:
        """Select examples."""
        examples = []
        if self.method in ["down", "up", "random"]:
            examples = self.select_examples_by_length(input_variables)
        else:
            raise ValueError(f"method {self.method} not supported")
        return examples

    def calculate_text_length(self):
        """(Re)compute the rendered length of every stored example."""
        self.example_text_lengths = []
        if self.examples is None: return
        for example in self.examples:
            string_example = self.example_prompt.format(**example)
            self.example_text_lengths.append(self.get_text_length(string_example))

    def select_examples_by_length(self, input_variables: Dict[str, str]) -> List[Dict]:
        """Select examples that fit the budget left after the input variables."""
        method = self.method
        # BUG FIX: `examples` was unbound in the partial-fit branches below,
        # raising NameError on the first append; initialize it up front.
        examples = []
        inputs = " ".join(input_variables.values())
        remaining_length = self.max_length - self.get_text_length(inputs)
        if remaining_length <= 0:
            examples = []
        elif remaining_length > sum(self.example_text_lengths):
            # Everything fits: return all examples in the order the method implies.
            if method == "down":
                examples = self.examples[::-1]
            elif method == "up":
                examples = self.examples
            else:
                examples = deepcopy(self.examples)
                np.random.shuffle(examples)
        elif method == "down":
            # Take examples from the end while they still fit.
            i = len(self.examples) - 1
            while remaining_length > 0 and i >= 0:
                new_length = remaining_length - self.example_text_lengths[i]
                if new_length < 0:
                    break
                examples.append(self.examples[i])
                remaining_length = new_length
                i -= 1
        elif method == "up":
            # Take examples from the start while they still fit.
            i = 0
            while remaining_length > 0 and i < len(self.examples):
                new_length = remaining_length - self.example_text_lengths[i]
                if new_length < 0:
                    break
                examples.append(self.examples[i])
                remaining_length = new_length
                i += 1
        elif method == "random":
            no_repeat = list(np.arange(len(self.examples)))
            while remaining_length > 0 and len(no_repeat) > 0:
                # Pick one not-yet-used example at random.
                # BUG FIX: np.random.choice(..., size=1) returns a 1-element
                # ndarray, which cannot index a Python list and breaks
                # no_repeat.remove(); draw a plain int instead.
                i = int(np.random.choice(no_repeat))
                new_length = remaining_length - self.example_text_lengths[i]
                if new_length < 0:
                    break
                examples.append(self.examples[i])
                remaining_length = new_length
                no_repeat.remove(i)
        else:
            raise ValueError(f"method {method} not supported")
        return examples

def random_example_selector(examples:List[str],number:int=5)->List[str]:
    """Return up to *number* examples drawn at random from *examples*.

    - empty input  -> empty list
    - fewer than *number* examples -> all of them, shuffled
    - otherwise    -> *number* distinct examples in random order

    BUG FIX: the sampling branch used to return a numpy ndarray, violating the
    declared List[str] return type; it is now converted back to a plain list.
    """
    len_examples = len(examples)
    if len_examples == 0:
        new_examples = []
    elif len_examples < number:
        new_examples = deepcopy(examples)
        np.random.shuffle(new_examples)
    else:
        # np.random.choice returns an ndarray; materialize a list of str.
        new_examples = list(np.random.choice(examples, size=number, replace=False))
    return new_examples

def generate_dynamic_prompt(examples, prefix:str, suffix:str, config_dict:Dict):
    """Render a few-shot prompt whose examples are trimmed to the token budget.

    The example budget is TOKEN_LIMIT minus the rendered prefix and suffix,
    the completion allowance and a safety margin.
    """
    count_tokens = get_token_count_func()
    used = count_tokens(prefix.format(**config_dict)) + count_tokens(suffix.format(**config_dict))
    budget = TOKEN_LIMIT - used - TOKEN_COMPLETION_LIMIT - RELAX_TOKEN

    example_prompt = PromptTemplate(
        input_variables=["input"],
        template="{input}",
    )
    selector = CustomExampleSelector(
        examples=[{"input": example} for example in examples],
        example_prompt=example_prompt,
        max_length=budget,
        method="up"
    )
    few_shot = FewShotPromptTemplate(
        example_selector=selector,
        example_prompt=example_prompt,
        prefix=prefix,
        suffix=suffix,
        input_variables=list(config_dict.keys()),
    )
    return few_shot.format(**config_dict)