import os

from flask import Flask
from langchain.chains import RetrievalQA
from langchain.chat_models.gigachat import GigaChat
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.schema import HumanMessage, SystemMessage
from langchain.vectorstores.faiss import FAISS
from langchain_community.document_loaders import PyPDFLoader
from langchain_core.documents import Document

chat = GigaChat(credentials='Yjg4MTQzMmUtNDAwMS00NDk0LThjOGUtNmU5ZWQ2YzQ4NDQ2OmQ4MWMxZGZiLTFmNGYtNDk5NS05OGQzLTBiMzYyYWJmNjk3OA==', scope="GIGACHAT_API_CORP", verify_ssl_certs=False)

path_to_prompts = './' #change path
path_article = './articles/' #change path

with open(path_to_prompts+'tag_extractor.txt', 'r', encoding = 'utf-8') as file:
    prompt = file.read().replace('\n', '')

messages1 = [ #must be individual store for every user
    SystemMessage(
        content=prompt
    )
]

messages2 = [] #must be individual store for every user
prompt_types = [0]

app = Flask(__name__)
@app.route('/tag_extractor')
def home(user_input='Как сгладить функцию кватернионов?'): 
    messages1.append(HumanMessage(content=user_input))
    res = chat(messages1)
    messages1.append(res)
    return res.content
if __name__ == "__main__":
    app.run(debug=True, host='0.0.0.0', port=5000)

@app.route('/chat')
def home(user_input='Как сгладить функцию кватернионов?', articles_name = ['elibrary_1','elibrary_2','elibrary_3','elibrary_4','elibrary_5','elibrary_6','elibrary_7','elibrary_8','elibrary_9'], prompt_type=2): 
    if prompt_type!=prompt_types[-1]:
        path = path_to_prompts+'prompt_'+str(prompt_type)+'.txt' 
        with open(path, 'r', encoding = 'utf-8') as file:
            cur_prompt = file.read().replace('\n', '')
        messages2.append(SystemMessage(content=cur_prompt))
    messages2.append(HumanMessage(content=user_input))

    summ = []
    for x in articles_name:
        loader = PyPDFLoader(path_article+x+'.pdf')
        pages = loader.load_and_split()
        summ.append(pages[0])
    embedding = HuggingFaceEmbeddings(model_name="intfloat/multilingual-e5-base")
    faiss_db = FAISS.from_documents(summ, embedding=embedding)
    embedding_retriever = faiss_db.as_retriever(search_kwargs={"k": 7})
    qa = RetrievalQA.from_chain_type(
        llm=chat,
        chain_type="stuff",
        retriever=embedding_retriever
    )
    res = qa(cur_prompt+'"'+user_input+'"')
    messages2.append(res)
    return res.result

if __name__ == "__main__":
    app.run(debug=True, host='0.0.0.0', port=5000)