from pprint import pprint
from typing import Any, Callable, List, Optional

from langchain import PromptTemplate
from langchain.chains import LLMChain
from langchain.graphs.networkx_graph import KG_TRIPLE_DELIMITER
from langchain.llms import OpenAI


def get_OpenAI(
        model_name: str,
        temperature: float,
        max_tokens: Optional[int] = None,
        streaming: bool = False,
        echo: bool = False,
        callbacks: Optional[List[Callable]] = None,
        verbose: bool = False,
        **kwargs: Any,
) -> OpenAI:
    """Build a langchain ``OpenAI`` LLM from the local model-worker config.

    Args:
        model_name: Key into the worker config; the literal ``"openai-api"``
            is resolved to the concrete model name stored in that config.
        temperature: Sampling temperature passed through to the model.
        max_tokens: Optional completion-length cap (None = provider default).
        streaming: Enable token streaming.
        echo: Echo the prompt in the completion.
        callbacks: Optional list of langchain callbacks. Defaults to a fresh
            empty list per call (never a shared mutable default).
        verbose: Verbose-mode flag passed through to the model.
        **kwargs: Extra keyword arguments forwarded to ``OpenAI``.

    Returns:
        A configured ``OpenAI`` LLM instance.

    Raises:
        KeyError: If the resolved config lacks ``api_key``/``api_base_url``.
    """
    config = get_model_worker_config(model_name)
    if model_name == "openai-api":
        # "openai-api" is an alias; the real model name lives in the config.
        model_name = config.get("model_name")
    api_key = config['api_key']
    api_base = config['api_base_url']

    model = OpenAI(
        streaming=streaming,
        verbose=verbose,
        # Fresh list per call — the original `callbacks=[]` default was a
        # single shared list mutated across every invocation.
        callbacks=callbacks if callbacks is not None else [],
        openai_api_key=api_key,
        openai_api_base=api_base,
        model_name=model_name,
        temperature=temperature,
        max_tokens=max_tokens,
        openai_proxy=config.get("openai_proxy"),
        echo=echo,
        **kwargs
    )
    return model


def get_model_worker_config(model_name: Optional[str] = None) -> dict:
    """Return a copy of the ``ONLINE_LLM_MODEL`` entry for *model_name*.

    Args:
        model_name: Config key; ``None`` or an unknown name yields ``{}``.

    Returns:
        A shallow copy of the config dict, so callers can mutate the result
        without corrupting the shared module-level configuration.
    """
    # Imported lazily to avoid a hard dependency at module import time.
    from configs.model_config import ONLINE_LLM_MODEL
    config = ONLINE_LLM_MODEL.get(model_name, {}).copy()

    return config


def parse_triples(response, delimiter=KG_TRIPLE_DELIMITER):
    """Split an LLM response string into a list of triple strings.

    An empty or ``None`` response yields ``[]``; otherwise the raw response
    is split on *delimiter* (langchain's ``KG_TRIPLE_DELIMITER`` by default).
    """
    if response:
        return response.split(delimiter)
    return []


# Few-shot prompt template (Chinese) for knowledge-triple extraction.
# Triples are delimiter-separated "(entity, relation/attribute, entity)" tuples.
_DEFAULT_KNOWLEDGE_TRIPLE_EXTRACTION_TEMPLATE = (
    "从文本中提取所有的知识三元组。"
    " 知识三元组包含实体，属性，实体或者实体，关系，实体。"
    " 实体是被描述的实体，属性或者关系是两个实体之间存在的特定约束。\n\n"
    "例子\n"
    "Nevada是美国的一个州，也是美国第一大黄金生产者。\n\n"
    # Fixed: the first example previously emitted the (美国,州,Nevada) triple twice.
    f"输出: (美国,州,Nevada)"
    f"{KG_TRIPLE_DELIMITER}(Nevada,第一大黄金生产者,美国)\n"
    "例子\n"
    "遥感（remote sensing）是指非接触的，远距离的探测技术。一般指运用传感器/遥感器对物体的电磁波的辐射、反射特性的探测。\n"
    # Fixed: removed a stray extra ')' after 物体的电磁波的辐射 in the example output.
    f"输出: (遥感, 是, 探测技术){KG_TRIPLE_DELIMITER}(遥感, 探测, 物体的电磁波的辐射){KG_TRIPLE_DELIMITER}(遥感, 探测, 反射特性)\n"
    "例子\n"
    "{text}"
    "输出:"
)

# Module-level LLM and extraction chain, built once at import time.
_triple_prompt = PromptTemplate(
    input_variables=["text"],
    template=_DEFAULT_KNOWLEDGE_TRIPLE_EXTRACTION_TEMPLATE,
)
llm = get_OpenAI(model_name='openai-api', temperature=0.7)
chain = LLMChain(llm=llm, prompt=_triple_prompt)


def getTuple(prompt: str) -> List[str]:
    """Extract knowledge triples from *prompt* via the module-level chain.

    Args:
        prompt: Raw input text to run through the extraction chain.

    Returns:
        The list of triple strings parsed from the LLM response.
        (The original ``-> str`` annotation was wrong: a list is returned.)
    """
    # No `global` statement needed: `chain` is only read, never rebound.
    response = chain.invoke({'text': prompt}).get('text')
    return parse_triples(response)


