'''
Author: caoniu caoniu@jushuitan.com
Date: 2023-06-28 19:01:41
LastEditors: caoniu caoniu@jushuitan.com
LastEditTime: 2023-06-28 23:28:01
FilePath: /pdf_knowledge_base/knowledge.py
Description: 这是默认设置,请设置`customMade`, 打开koroFileHeader查看配置 进行设置: https://github.com/OBKoro1/koro1FileHeader/wiki/%E9%85%8D%E7%BD%AE
'''

from langchain.document_loaders import PyPDFDirectoryLoader
from langchain.chains.qa_with_sources import load_qa_with_sources_chain
from langchain.vectorstores import Pinecone
import pinecone
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.chat_models import ChatOpenAI
from langchain.prompts import PromptTemplate
import sys
import os
from dotenv import load_dotenv

# Pull configuration from a .env file into the process environment.
load_dotenv()

# Every credential the script needs, mapped to the human-readable name
# used in the "Please configure your ..." message when it is missing.
_REQUIRED_ENV = {
    "OPENAI_API_KEY": "OpenAI API key",
    "PINECONE_API_KEY": "Pinecone API key",
    "PINECONE_ENV": "Pinecone ENV",
}


def _first_missing_env(env, required):
    """Return the label of the first required key that is unset or empty.

    Args:
        env: mapping to look keys up in (e.g. ``os.environ``).
        required: mapping of env-var name -> human-readable label.

    Returns:
        The label of the first missing/empty key, or ``None`` if all are set.
    """
    for key, label in required.items():
        # ``not env.get(key)`` covers both "unset" and "set to empty string",
        # matching the original None/len()==0 double check.
        if not env.get(key):
            return label
    return None


_missing = _first_missing_env(os.environ, _REQUIRED_ENV)
if _missing is not None:
    print(f"Please configure your {_missing}.")
    # The original bare exit() reports success (status 0) on a configuration
    # failure; exit with a nonzero status so callers/scripts can detect it.
    sys.exit(1)

# The question comes from the first CLI argument when provided; otherwise
# prompt interactively until the user types something non-blank.
if len(sys.argv) > 1:
    query = sys.argv[1]
else:
    query = ""
    while not query:
        # .strip() so a whitespace-only answer re-prompts instead of being
        # sent to the model as an (effectively empty) question.
        query = input("\nEnter a question: ").strip()

print("\nThinking ...")

# Connect to the existing Pinecone index and fetch the stored chunks
# most similar to the user's question.
pinecone.init(
    api_key=os.getenv("PINECONE_API_KEY"),  # find at app.pinecone.io
    environment=os.getenv("PINECONE_ENV"),  # next to api key in console
)

index_name = 'knowledge-base'
embeddings = OpenAIEmbeddings()
docsearch = Pinecone.from_existing_index(index_name, embeddings)

# Top-k similarity search over the index (default k from the library).
output = docsearch.similarity_search(query)

# Map step: ask the model to extract (and translate into Chinese) any
# passage of a single retrieved chunk that bears on the question.
question_prompt_template = """Use the following portion of a long document to see if any of the text is relevant to answer the question.
Return any relevant text translated into Chinese.
{context}
Question: {question}
Relevant text, if any, in Chinese:"""

QUESTION_PROMPT = PromptTemplate(
    input_variables=["context", "question"],
    template=question_prompt_template,
)

# Reduce step: merge the per-chunk extracts into one final Chinese answer,
# keeping the source references.
# Fix: the original prompt read "create a final answer Chinese" — the
# missing "in" garbles the instruction the LLM receives.
combine_prompt_template = """Given the following extracted parts of a long document and a question, create a final answer in Chinese, and output the sources in Chinese.
If you don't know the answer, just say that you don't know. Don't try to make up an answer.

QUESTION: {question}
=========
{summaries}
=========
Answer in Chinese:"""
COMBINE_PROMPT = PromptTemplate(
    template=combine_prompt_template, input_variables=["summaries", "question"]
)
# Build a map-reduce QA chain: each retrieved chunk is processed with
# QUESTION_PROMPT, then the intermediate extracts are merged with
# COMBINE_PROMPT into the final sourced answer.
llm = ChatOpenAI(temperature=0, verbose=True, model='gpt-3.5-turbo-0613')
chain = load_qa_with_sources_chain(
    llm,
    chain_type="map_reduce",
    return_map_steps=True,
    question_prompt=QUESTION_PROMPT,
    combine_prompt=COMBINE_PROMPT,
)

answer = chain(
    {"input_documents": output, "question": query},
    return_only_outputs=True,
)
print('')
print(answer['output_text'])
