|
import streamlit as st |
|
from langchain.vectorstores import faiss |
|
from langchain.text_splitter import CharacterTextSplitter |
|
from langchain.embeddings import OpenAIEmbeddings, HuggingFaceInstructEmbeddings |
|
from langchain.vectorstores import FAISS |
|
from langchain.document_loaders import TextLoader |
|
from langchain.embeddings import SentenceTransformerEmbeddings |
|
from tempfile import NamedTemporaryFile |
|
import os |
|
import shutil |
|
|
|
# Recreate a clean ./tempDir scratch directory on every script run.
# ignore_errors / exist_ok replace the original bare `except: pass` blocks,
# which silently swallowed *every* exception (including SystemExit and
# KeyboardInterrupt) — here only the expected filesystem states are tolerated.
shutil.rmtree("tempDir", ignore_errors=True)
os.makedirs("tempDir", exist_ok=True)
|
def save_uploadedfile(uploadedfile):
    """Persist one Streamlit upload into ./tempDir and report success.

    Args:
        uploadedfile: Streamlit ``UploadedFile``-like object exposing
            ``.name`` and ``.getbuffer()``.

    Returns:
        The Streamlit success element announcing where the file was saved.
    """
    destination = os.path.join("tempDir", uploadedfile.name)
    with open(destination, "wb") as out_file:
        out_file.write(uploadedfile.getbuffer())
    return st.success("Saved File:{} to tempDir".format(uploadedfile.name))
|
|
|
|
|
def main():
    """Render the Streamlit page: accept FAISS-index uploads in the sidebar,
    then run a similarity search against the loaded index.

    NOTE(review): the original file's indentation was lost; the search step is
    placed inside the "Procedi" button branch (after the uploads are saved),
    which is the only ordering that guarantees an index exists on disk —
    confirm against the intended layout.
    """
    st.set_page_config(page_title="chet with unipv")

    # Bug fix: the original discarded this value and always searched with a
    # hard-coded query. It is now used as the query when non-empty.
    question = st.text_input("fai una domanda al tuo professore ")

    with st.sidebar:
        st.subheader("Your_faiss_index")
        documents = st.file_uploader(
            "upload your faiss index here ", accept_multiple_files=True
        )
        if st.button("Procedi"):
            # file_uploader may return None before any upload; guard the loop.
            for document in documents or []:
                save_uploadedfile(document)

            print(documents)
            # Fall back to the original demo query when the input is empty,
            # preserving the previous behaviour.
            query = question or "chi è matteo salvini?"
            embeddings = HuggingFaceInstructEmbeddings(model_name="thenlper/gte-base")
            new_db = FAISS.load_local("tempDir", embeddings)
            docs = new_db.similarity_search(query)
            print(docs)
            # Bug fix: results previously went only to stdout; show them on
            # the page as well.
            st.write(docs)
|
# Script entry point: run the Streamlit app only when executed directly.
if __name__ == "__main__":
    main()