import streamlit as st
from huggingface_hub import login
from langchain.chains import RetrievalQA
from langchain.prompts import PromptTemplate
from langchain_community.document_loaders import PyPDFLoader
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
from langchain_huggingface import HuggingFaceEndpoint
from langchain_text_splitters import CharacterTextSplitter

# Authenticate against the Hugging Face Hub with the token stored in Streamlit secrets
login(token=st.secrets["HF_TOKEN"])
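
# Assumed dependency set for the imports above (not pinned in the source):
# streamlit, langchain, langchain-community, langchain-huggingface,
# langchain-text-splitters, faiss-cpu, sentence-transformers, pypdf, huggingface_hub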

# Load the source PDF document
loader = PyPDFLoader("test-1.pdf")
data = loader.load()

# Split the document into chunks
text_splitter = CharacterTextSplitter(chunk_size=512, chunk_overlap=0, separator="\n\n")
texts = text_splitter.split_documents(data)
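
# Build a FAISS vector index over the chunks using a sentence-transformers embedding model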
db = FAISS.from_documents(texts,
                          HuggingFaceEmbeddings(model_name='sentence-transformers/all-MiniLM-L12-v2'))



# Maximal-marginal-relevance retriever that returns the single most relevant chunk
retriever = db.as_retriever(
    search_type="mmr",
    search_kwargs={"k": 1},
)


prompt_template = """
### [INST]
Instruction: You are a Q&A assistant. Your goal is to answer questions as accurately as possible based on the instructions and context provided without using prior knowledge.You answer in FRENCH
        Analyse carefully the context and provide a direct answer based on the context.
Answer in french only
{context}
Vous devez répondre aux questions en français.

### QUESTION:
{question}
[/INST]
Answer in french only
 Vous devez répondre aux questions en français.

 """

repo_id = "mistralai/Mistral-7B-Instruct-v0.2"

# Remote text-generation endpoint for the instruct model, hosted on the Hugging Face Hub
mistral_llm = HuggingFaceEndpoint(
    repo_id=repo_id,
    max_new_tokens=512,
    temperature=0.05,
    huggingfacehub_api_token=st.secrets["HF_TOKEN"],
)

# Create the prompt from the template; the QA chain fills in both variables
prompt = PromptTemplate(
    input_variables=["context", "question"],
    template=prompt_template,
)

# Retrieval-augmented QA chain: retrieved chunks are "stuffed" into the prompt as {context}
qa = RetrievalQA.from_chain_type(
    llm=mistral_llm,
    chain_type="stuff",
    retriever=retriever,
    chain_type_kwargs={"prompt": prompt},
)

# Configure the Streamlit page (title and tab icon)
st.set_page_config(page_title="Chatbot Interface", page_icon="🤖")

# Define function to handle user input and display chatbot response
def chatbot_response(user_input):
    response = qa.run(user_input)
    return response

# Streamlit components
st.markdown("# 🤖 **Your Friendly Methodo Assistant**")
st.markdown("## \"Votre Réponse à Chaque Défi Méthodologique\" 📈")

user_input = st.text_input("You:", "")
submit_button = st.button("Send 📨")

# Handle user input
if submit_button:
    if user_input.strip() != "":
        bot_response = chatbot_response(user_input)
        st.markdown("### You:")
        st.markdown(f"> {user_input}")
        st.markdown("### Bot:")
        st.markdown(f"> {bot_response}")
    else:
        st.warning("⚠️ Please enter a message.")

# Motivational quote at the bottom
st.markdown("---")
st.markdown("*La collaboration est la clé du succès. Chaque question trouve sa réponse, chaque défi devient une opportunité.*")