# Legal_app / app.py
import os
import gradio as gr
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import Chroma
from langchain.chains import RetrievalQA

# Read the OpenAI API key from the environment instead of hard-coding the secret
openai_api_key = os.environ["OPENAI_API_KEY"]

# Initialize the OpenAI chat model
chat_model = "gpt-3.5-turbo"
llm = ChatOpenAI(model_name=chat_model, openai_api_key=openai_api_key, temperature=0)
# Initialize the Chroma vector database from the persisted index
persist_directory = "chroma_db"  # assumed location of the persisted index; adjust as needed
embedding = OpenAIEmbeddings(openai_api_key=openai_api_key)
vectordb = Chroma(persist_directory=persist_directory, embedding_function=embedding)
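# The persisted index above is assumed to have been built offline beforehand.
# A minimal sketch of that step (hypothetical document path and splitter settings):
#
#   from langchain.document_loaders import PyPDFLoader
#   from langchain.text_splitter import RecursiveCharacterTextSplitter
#   docs = PyPDFLoader("legal_docs.pdf").load()
#   chunks = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100).split_documents(docs)
#   Chroma.from_documents(chunks, embedding, persist_directory=persist_directory).persist()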
# Function to answer questions
def answer_question(question):
    # Retrieve relevant chunks from Chroma and let the LLM compose an answer
    qa_chain = RetrievalQA.from_chain_type(
        llm,
        retriever=vectordb.as_retriever())
    result = qa_chain({"query": question})
    return result['result']
# Create a Gradio interface
iface = gr.Interface(fn=answer_question, inputs="text", outputs="text")
# Launch the Gradio app
iface.launch()