# -*- coding: utf-8 -*-


import os
# from bot.bot import Bot
from settings import logging
from llama_index import GPTSimpleVectorIndex, SimpleDirectoryReader
from llama_index import QuestionAnswerPrompt
from llama_index import LLMPredictor, ServiceContext
from langchain import OpenAI
from langchain.llms.base import LLM
from llama_index import LangchainEmbedding, GPTListIndex, PromptHelper
from transformers import pipeline
from typing import Optional, List, Mapping, Any

# SECURITY: the OpenAI API key must come from the environment (or a secrets
# manager), never be hard-coded in source control. The previous revision
# committed a live `sk-...` key here; that key is compromised and must be
# revoked and rotated.
if not os.environ.get("OPENAI_API_KEY"):
    logging.warning("OPENAI_API_KEY is not set; index queries will fail.")

# Load the pre-built vector index from disk once at import time so every
# DocumentAssistanceBot instance shares it.
index = GPTSimpleVectorIndex.load_from_disk('index.json')


class DocumentAssistanceBot(object):
    """Question-answering bot backed by the module-level llama_index vector index.

    ``reply`` runs a query against the shared ``index`` loaded at import time
    and appends the source passage the answer was drawn from.
    """

    # Chinese QA prompt: answer strictly from the supplied context, without
    # relying on prior (model) knowledge. Kept as a class constant; wiring it
    # into a QuestionAnswerPrompt / custom LLMPredictor is not implemented yet
    # (the previous revision carried that work-in-progress as commented-out
    # code, removed here — see VCS history if needed).
    TEXT_QA_PROMPT_TMPL = (
        "以下是上下文信息。\n"
        "---------------------\n"
        "{context_str}"
        "\n---------------------\n"
        "在不依赖先验知识的情况下，根据上下文信息回答问题:\n"
        "{query_str}\n\n### Response:\n"
    )

    def __init__(self):
        # No per-instance state yet; the index is module-level and the prompt
        # template is a class constant.
        pass

    def reply(self, query):
        """Answer *query* from the shared index.

        :param query: natural-language question (str).
        :returns: the answer text, followed by the source passage it came
            from; if the index returned no source nodes, just the answer.
        """
        response = index.query(query)

        # Guard against an empty source_nodes list — indexing [0] blindly
        # raised IndexError when the index produced an answer with no sources.
        if not response.source_nodes:
            return response.response

        return '{}\n\n"出处"{}'.format(response.response, response.source_nodes[0].source_text)















