Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -617,6 +617,8 @@ With every Rowad program, participants will be mentored by seasoned bankers and
|
|
617 |
|
618 |
|
619 |
import gradio as gr
|
|
|
|
|
620 |
|
621 |
#State of Union Text
|
622 |
|
@@ -632,15 +634,12 @@ from langchain.vectorstores.faiss import FAISS
|
|
632 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
633 |
from langchain import OpenAI, VectorDBQA
|
634 |
|
635 |
-
|
636 |
text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
|
637 |
texts = text_splitter.split_text(state_of_the_union)
|
638 |
|
639 |
embeddings = OpenAIEmbeddings()
|
640 |
vectorstore = FAISS.from_texts(texts, embeddings)
|
641 |
qa = VectorDBQA.from_chain_type(llm=OpenAI(model_name="text-davinci-003", temperature=0, max_tokens=256), chain_type="stuff", vectorstore=vectorstore)
|
642 |
-
#llm = ChatOpenAI(model_name="gpt-3.5-turbo", temperature=0, max_tokens=256) # Modify model_name if you have access to GPT-4
|
643 |
-
|
644 |
|
645 |
from langchain.chains import load_chain
|
646 |
|
@@ -649,10 +648,17 @@ chain = load_chain("lc://chains/vector-db-qa/stuff/chain.json", vectorstore=vect
|
|
649 |
def run_chain(query):
|
650 |
return chain.run(query)
|
651 |
|
|
|
|
|
|
|
|
|
|
|
652 |
inputs = gr.inputs.Textbox(lines=1, label="Enter your Question:")
|
653 |
outputs = gr.outputs.Textbox(label="Answer:")
|
654 |
|
655 |
def run_app(input):
|
656 |
-
|
|
|
|
|
657 |
|
658 |
-
gr.Interface(fn=run_app, inputs=inputs, outputs=outputs, title="The following is a conversation with a human called Shegardi. Shegardi is helpful, precise, truthful, and very friendly. Also, Shegardi is an employee of Warba Bank, located in Kuwait. Shegardi will only use the information provided to him.").launch()
|
|
|
617 |
|
618 |
|
619 |
import gradio as gr
|
620 |
+
import os
|
621 |
+
import csv
|
622 |
|
623 |
#State of Union Text
|
624 |
|
|
|
634 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain import OpenAI, VectorDBQA

# Split the source document into ~1000-character chunks with no overlap so
# each chunk fits comfortably in the embedding / LLM context window.
# NOTE(review): `state_of_the_union` must be defined earlier in the file —
# confirm it holds the bank's reference text, not the literal SOTU speech.
text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
texts = text_splitter.split_text(state_of_the_union)

# Embed every chunk and index the vectors in an in-memory FAISS store.
embeddings = OpenAIEmbeddings()
vectorstore = FAISS.from_texts(texts, embeddings)
# Retrieval-QA chain; chain_type="stuff" packs all retrieved chunks into a
# single prompt for the completion model.
qa = VectorDBQA.from_chain_type(llm=OpenAI(model_name="text-davinci-003", temperature=0, max_tokens=256), chain_type="stuff", vectorstore=vectorstore)
|
|
|
|
|
643 |
|
644 |
from langchain.chains import load_chain
|
645 |
|
|
|
648 |
def run_chain(query):
    """Execute the loaded QA chain against *query* and return its answer."""
    answer = chain.run(query)
    return answer
|
650 |
|
651 |
+
def save_question_to_csv(file_path, question):
    """Append one question as a single-column row to a CSV log file.

    The file is opened in append mode, so it is created on first use and
    existing rows are preserved. newline='' lets the csv module control
    line endings; utf-8 keeps Arabic/localized questions intact.
    """
    row = [question]
    with open(file_path, 'a', newline='', encoding='utf-8') as handle:
        csv.writer(handle).writerow(row)
|
655 |
+
|
656 |
# UI widgets for the QA app.
# FIX: the gr.inputs / gr.outputs namespaces were removed in Gradio 3.x;
# referencing them raises AttributeError at import time (the Space's
# "Runtime error"). gr.Textbox is the supported component for both roles.
inputs = gr.Textbox(lines=1, label="Enter your Question:")
outputs = gr.Textbox(label="Answer:")
|
658 |
|
659 |
def run_app(input):
    """Gradio callback: answer the user's question and log it.

    Runs the QA chain on the question, then appends the raw question to
    questions.csv as a side effect before returning the answer.
    NOTE(review): `input` shadows the builtin; the name is kept to
    preserve the public signature for keyword callers.
    """
    asked = input
    response = run_chain(asked)
    save_question_to_csv('questions.csv', asked)
    return response
|
663 |
|
664 |
+
# Build and launch the web UI; the long title doubles as the assistant's
# persona description shown above the input box.
gr.Interface(fn=run_app, inputs=inputs, outputs=outputs, title="The following is a conversation with a human called Shegardi. Shegardi is helpful, precise, truthful, and very friendly. Also, Shegardi is an employee of Warba Bank, located in Kuwait. Shegardi will only use the information provided to him.").launch()
|