import gradio as gr
import dill
from langchain.llms import LlamaCpp
from langchain.callbacks import StreamingStdOutCallbackHandler
from langchain.callbacks.manager import CallbackManager
from langchain.chains.question_answering import load_qa_chain

# Stream tokens to stdout while the model generates.
callback_manager = CallbackManager([StreamingStdOutCallbackHandler()])

# Build a "stuff" question-answering chain on top of a local llama.cpp model.
chain = load_qa_chain(
    LlamaCpp(
        model_path="llama-13B-Q4_K_M.gguf",
        temperature=0,
        top_p=1,
        callback_manager=callback_manager,
        verbose=True,
    ),
    chain_type="stuff",
)


def greet(name):
    # Reload the serialized vector store on every call, retrieve the most
    # similar documents, and let the QA chain answer from them.
    with open("client_embebedings.mate", "rb") as f:
        db = dill.load(f)
    docs = db.similarity_search(name)
    return chain.run(input_documents=docs, question=name)


def greet1(name):
    # Simple echo endpoint, useful for checking the UI wiring without the model.
    return "Hello ya client " + name


with gr.Blocks() as iface:
    name = gr.Textbox(label="Name")
    output = gr.Textbox(label="Output Box")
    greet_btn = gr.Button("Greet")
    greet_btn.click(fn=greet, inputs=name, outputs=output, api_name="greet")
    greet1_btn = gr.Button("Greet1")
    greet1_btn.click(fn=greet1, inputs=name, outputs=output, api_name="testing")

iface.launch()
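
# A hedged usage sketch, not part of the original script: once the app is
# launched, the two endpoints registered via api_name ("greet" and "testing")
# can be called from another process with the gradio_client package. The URL
# below assumes the default local launch address; adjust it if a custom port
# or share link is used, and note that "Acme Corp" is just a sample input.
#
#   from gradio_client import Client
#
#   client = Client("http://127.0.0.1:7860/")
#   print(client.predict("Acme Corp", api_name="/greet"))    # runs the QA chain
#   print(client.predict("Acme Corp", api_name="/testing"))  # plain greeting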