"""Gradio demo: answer free-form questions with a Hugging Face Hub LLM via LangChain."""

import gradio as gr
from langchain import PromptTemplate, LLMChain
from langchain.llms import HuggingFaceHub

# Prompt wrapper that nudges the model toward chain-of-thought style answers.
template_by_step = """Question: {question} Answer: Let's think step by step."""


def run(
    question: str = None,
    repo_id: str = "google/flan-t5-xxl",
    temperature: float = 0.5,
    max_length: int = 64,
    by_step: bool = False,
):
    """Answer *question* with a Hugging Face Hub model through a LangChain chain.

    Args:
        question: The user's question text.
        repo_id: Hugging Face Hub model repository to query.
        temperature: Sampling temperature forwarded to the model.
        max_length: Maximum generation length forwarded to the model.
        by_step: When True, wrap the question in the chain-of-thought
            prompt template; otherwise send the question verbatim.
            (Renamed from the original typo ``by_steq``; Gradio invokes
            this function positionally, so the rename is caller-safe.)

    Returns:
        The model's text answer.
    """
    template = template_by_step if by_step else "{question}"
    prompt = PromptTemplate(template=template, input_variables=["question"])
    llm = HuggingFaceHub(
        repo_id=repo_id,
        # Sliders may deliver floats; the model expects an integer length.
        model_kwargs={"temperature": temperature, "max_length": int(max_length)},
    )
    llm_chain = LLMChain(prompt=prompt, llm=llm)
    return llm_chain.run(question)


inputs = [
    gr.Textbox(label="Question"),
    gr.Dropdown(
        ["google/flan-t5-xxl", "google/flan-t5-base"],
        value="google/flan-t5-xxl",
        label="Model",
        allow_custom_value=True,
    ),
    gr.Slider(0.0, 1.0, value=0.5, step=0.05, label="Temperature"),
    gr.Slider(20, 1000, value=64, step=1, label="Max Length"),
    gr.Checkbox(label="Think step by step", value=False),
]

examples = [
    ["What is the capital of France?"],
    ["What's the Earth total population?"],
    ["Who won the FIFA World Cup in the year 1994?"],
    ["What NFL team won the Super Bowl in the year Justin Bieber was born?"],
    ["Translate the following to French: There are so many plans"],
    ["Write an article to introduce machine learning"],
]

title = "Langchain w/ HF Models"

gr.Interface(
    fn=run,
    inputs=inputs,
    # The model returns free-form text, so use a Textbox rather than the
    # original 'label' (classification) output component.
    outputs=gr.Textbox(label="Answer"),
    title=title,
    examples=examples,
).launch()