# gradio/test_gradio.py
import os

import gradio as gr
from langchain.llms import GooglePalm
from langchain import PromptTemplate, LLMChain

# Read the Google PaLM API key from the GOOGLE_API_KEY environment variable
# rather than hardcoding a secret in the source.
llm = GooglePalm(temperature=0.1, google_api_key=os.environ["GOOGLE_API_KEY"])
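# To run locally (assuming the key is already set in the environment):
#   export GOOGLE_API_KEY="<your key>"
#   python test_gradio.py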
# Chain-of-thought style prompt: the model is asked to reason step by step.
template = """Question: {question}
Answer: Let's think step by step."""
prompt_open = PromptTemplate(template=template, input_variables=["question"])
open_chain = LLMChain(prompt=prompt_open, llm=llm)
def predict(message, history):
    # Gradio's ChatInterface calls this with the latest message and the chat
    # history; the history is unused here, so each message is answered on its own.
    gpt_response = open_chain.run(message)
    return gpt_response
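# Launch the chat UI. share=True additionally creates a temporary public
# *.gradio.live link alongside the local server.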
gr.ChatInterface(predict).launch(share=True)