Ilyas KHIAT committed
Commit: f4ed86e
1 Parent(s): e9ce696
Commit message: paste
Files changed:
- .gitignore +3 -3
- chat_te.py +4 -4
- requirements.txt +1 -0
.gitignore
CHANGED
@@ -1,4 +1,4 @@
 __pycache__/
-.streamlit
-
-
+.streamlit
+DATA_bziiit/vectorstore_op
+.env
chat_te.py
CHANGED
@@ -20,12 +20,12 @@ def get_docs_from_pdf(file):
     return docs
 
 def get_doc_chunks(docs):
-    text_splitter = SemanticChunker(OpenAIEmbeddings())
+    text_splitter = SemanticChunker(OpenAIEmbeddings(model="text-embedding-3-large"))
     chunks = text_splitter.split_documents(docs)
     return chunks
 
 def get_vectorstore_from_docs(doc_chunks):
-    embedding = OpenAIEmbeddings()
+    embedding = OpenAIEmbeddings(model="text-embedding-3-large")
     vectorstore = FAISS.from_documents(documents=doc_chunks, embedding=embedding)
     return vectorstore
 
@@ -47,7 +47,7 @@ def create_db(file):
     # docs = get_docs_from_pdf(file)
     # doc_chunks = get_doc_chunks(docs)
     # vectorstore = get_vectorstore_from_docs(doc_chunks)
-    vectorstore = FAISS.load_local(
+    vectorstore = FAISS.load_local(file, OpenAIEmbeddings(model="text-embedding-3-large"),allow_dangerous_deserialization= True)
     return vectorstore
 
 def get_response(chain,user_query, chat_history):
@@ -81,7 +81,7 @@ def display_chat_te():
         AIMessage(content="Salut, posez-moi vos question sur la transistion ecologique."),
     ]
     if "chain" not in st.session_state:
-        db=create_db("DATA_bziiit/
+        db=create_db("./DATA_bziiit/vectorstore_op")
         chain = get_conversation_chain(db)
         st.session_state.chain = chain
 
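For context on the chat_te.py hunks above: the commit pins the embedding model to text-embedding-3-large and makes create_db load a pre-built FAISS index from disk instead of re-embedding the PDF on every start. Below is a minimal sketch of that build-then-load flow; the import paths and the save_local step are assumptions (the diff only shows the OpenAIEmbeddings(model=...) and FAISS.load_local calls), and it presumes the persisted index exists under DATA_bziiit/vectorstore_op and that OPENAI_API_KEY is set:

    # Sketch only: imports and the one-off build step are assumptions.
    from langchain_community.vectorstores import FAISS
    from langchain_openai import OpenAIEmbeddings

    embedding = OpenAIEmbeddings(model="text-embedding-3-large")

    # One-off build, roughly what the commented-out lines in create_db did:
    # vectorstore = FAISS.from_documents(documents=doc_chunks, embedding=embedding)
    # vectorstore.save_local("DATA_bziiit/vectorstore_op")

    # At app startup, reload the persisted index instead of re-embedding.
    # allow_dangerous_deserialization=True is needed because load_local unpickles
    # index metadata; only enable it for index files you generated yourself.
    vectorstore = FAISS.load_local(
        "./DATA_bziiit/vectorstore_op",
        embedding,
        allow_dangerous_deserialization=True,
    )
    retriever = vectorstore.as_retriever()

The same embedding model has to be used both to build and to load the index: text-embedding-3-large produces 3072-dimensional vectors, so an index built with the default embedding model would not be compatible at query time.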
requirements.txt
CHANGED
@@ -37,3 +37,4 @@ kaleido
 langchain-core
 langchain-mistralai
 firecrawl-py
+st_copy_to_clipboard
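The only requirements change is the new st_copy_to_clipboard dependency, a Streamlit component for copy-to-clipboard buttons. A minimal usage sketch; the import path and call signature below are assumptions based on the package name, not something shown in this commit:

    import streamlit as st
    from st_copy_to_clipboard import st_copy_to_clipboard  # assumed import path

    answer = "Example assistant reply to copy."
    st.write(answer)
    st_copy_to_clipboard(answer)  # assumed call: renders a copy-to-clipboard button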