"""Conversational chat over an uploaded CSV file using LangChain + Gradio."""

import gradio as gr
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.chat_models import ChatOpenAI
from langchain.chains import ConversationalRetrievalChain
from langchain.vectorstores import FAISS
from langchain.document_loaders.csv_loader import CSVLoader

# Per-process conversation state. `chain` is rebuilt only when a *new* file is
# uploaded so the chat history survives across turns (previously the chain was
# rebuilt -- and the history wiped -- on every single query).
chain = None
session_history = []
_current_file = None


def conversational_chat(query, lang_model_key, file_upload):
    """Answer `query` against the uploaded CSV, preserving chat history.

    Parameters
    ----------
    query : str
        The user's question.
    lang_model_key : str
        OpenAI API key, used for both the embeddings and the chat model.
    file_upload : str | None
        Path of the uploaded CSV file (``gr.File`` with ``type="filepath"``).

    Returns
    -------
    str
        The model's answer, or an instruction message when no file is loaded.
    """
    global chain, session_history, _current_file

    # Rebuild the retrieval chain only when a new file arrives; rebuilding on
    # every call would also reset the conversation history each turn.
    if file_upload is not None and file_upload != _current_file:
        loader = CSVLoader(file_path=file_upload, encoding="utf-8")
        data = loader.load()

        embeddings = OpenAIEmbeddings(openai_api_key=lang_model_key)
        vectors = FAISS.from_documents(data, embeddings)

        chain = ConversationalRetrievalChain.from_llm(
            llm=ChatOpenAI(
                temperature=0.0,
                model_name="gpt-4",
                openai_api_key=lang_model_key,
            ),
            retriever=vectors.as_retriever(),
        )
        session_history = []
        _current_file = file_upload

    # The original raised NameError here when no file had ever been uploaded.
    if chain is None:
        return "Please upload a CSV file and provide your OpenAI API key first."

    result = chain({"question": query, "chat_history": session_history})
    session_history.append((query, result["answer"]))
    return result["answer"]


iface = gr.Interface(
    fn=conversational_chat,
    inputs=[
        gr.Textbox(label="Query", lines=7),
        gr.Textbox(label="Your OpenAI API key:", type="password"),
        # type="filepath" so the callback receives a path; the original used
        # type="binary", which passes raw bytes that CSVLoader cannot open.
        gr.File(label="Upload your CSV file:", type="filepath"),
    ],
    outputs="text",
    title=(
        "Conversational CSV Chat: Please upload your file and set your "
        "API Key in order to use the functionalities"
    ),
)

if __name__ == "__main__":
    iface.launch()