import os
import pickle
from threading import Lock
from typing import List, Optional, Tuple

import gradio as gr
from langchain.llms import OpenAI
from langchain.chains import ChatVectorDBChain

from template import QA_PROMPT, CONDENSE_QUESTION_PROMPT
from pdf2vectorstore import convert_to_vectorstore


def get_chain(api_key, vectorstore, model_name):
    """Build a conversational retrieval chain over the paper's vectorstore."""
    llm = OpenAI(model_name=model_name, temperature=0, openai_api_key=api_key)
    qa_chain = ChatVectorDBChain.from_llm(
        llm,
        vectorstore,
        qa_prompt=QA_PROMPT,
        condense_question_prompt=CONDENSE_QUESTION_PROMPT,
    )
    return qa_chain


def set_openai_api_key(api_key: str, vectorstore, model_name: str):
    if api_key:
        return get_chain(api_key, vectorstore, model_name)
    return None


class ChatWrapper:
    def __init__(self):
        self.lock = Lock()
        self.previous_url = ""
        self.vectorstore_state = None
        self.chain = None

    def __call__(
        self,
        api_key: str,
        arxiv_url: str,
        inp: str,
        history: Optional[List[Tuple[str, str]]],
        model_name: str,
    ):
        if not arxiv_url or not api_key:
            history = history or []
            history.append((inp, "Please provide both arXiv URL and API key to begin"))
            return history, history

        # A new paper URL invalidates the old state: reset the chat history,
        # rebuild the vectorstore, and recreate the chain against it.
        if arxiv_url != self.previous_url:
            history = []
            vectorstore = convert_to_vectorstore(arxiv_url, api_key)
            self.previous_url = arxiv_url
            self.chain = set_openai_api_key(api_key, vectorstore, model_name)
            self.vectorstore_state = vectorstore

        if self.chain is None:
            self.chain = set_openai_api_key(api_key, self.vectorstore_state, model_name)

        # Serialize access so concurrent Gradio callbacks don't share state mid-call.
        self.lock.acquire()
        try:
            history = history or []
            if self.chain is None:
                history.append((inp, "Please paste your OpenAI key to use"))
                return history, history
            import openai

            openai.api_key = api_key
            output = self.chain({"question": inp, "chat_history": history})["answer"]
            history.append((inp, output))
        except Exception:
            raise
        finally:
            api_key = ""
            self.lock.release()
        return history, history


chat = ChatWrapper()

block = gr.Blocks(
    css=".gradio-container {background-color: #f8f8f8; font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif}"
)

with block:
    gr.HTML("