import os
from threading import Lock
from typing import List, Optional, Tuple

import gradio as gr
from langchain.chains import ConversationChain
from langchain.llms import OpenAI


def load_chain():
    """Logic for loading the chain you want to use should go here."""
    llm = OpenAI(temperature=0)
    chain = ConversationChain(llm=llm)
    return chain


def set_openai_api_key(api_key: str):
    """Set the api key and return chain.

    If no api_key, then None is returned.
    """
    if api_key:
        os.environ["OPENAI_API_KEY"] = api_key
        chain = load_chain()
        os.environ["OPENAI_API_KEY"] = ""
        return chain


class ChatWrapper:
    def __init__(self):
        # Lock so concurrent Gradio requests don't run the chain simultaneously.
        self.lock = Lock()

    def __call__(
        self,
        api_key: str,
        inp: str,
        history: Optional[List[Tuple[str, str]]],
        chain: Optional[ConversationChain],
    ):
        """Execute the chat functionality."""
        self.lock.acquire()
        try:
            history = history or []
            # If chain is None, that is because no API key was provided.
            if chain is None:
                history.append((inp, "Please paste your OpenAI key to use"))
                return history, history
            # Set OpenAI key
            import openai

            openai.api_key = api_key
            # Run chain and append input.
            output = chain.run(input=inp)
            history.append((inp, output))
        except Exception as e:
            raise e
        finally:
            self.lock.release()
        return history, history


chat = ChatWrapper()

block = gr.Blocks(css=".gradio-container {background-color: lightgray}")

with block:
    with gr.Row():
        gr.Markdown("