"""Gradio demo that answers free-text questions with a Lamini instruct-tuned model.

Requires the ``LAMINI-KEY`` environment variable to hold a production Lamini
API key (note the dash in the name: it cannot be set with a plain shell
``export``; use ``env LAMINI-KEY=... python app.py`` or similar).
"""

import os

import gradio as gr
from llama import LLM, Context, Type


class Question(Type):
    # Input schema sent to the LLM: a single free-text question.
    question: str = Context("a question")


class Response(Type):
    # Output schema expected from the LLM: the answer text.
    response: str = Context("the response to the question")


def lamini(input):
    """Send the user's question to the Lamini model and return a cleaned answer.

    Parameters
    ----------
    input : str
        The question text as typed into the Gradio textbox.
        (Name kept as ``input`` — it is the interface gradio binds to —
        even though it shadows the builtin.)

    Returns
    -------
    str
        The model's response, truncated at the first blank line.

    Raises
    ------
    KeyError
        If the ``LAMINI-KEY`` environment variable is not set.
    """
    # NOTE(review): the client is re-created on every request; if latency
    # matters, hoisting this to module level is worth confirming with the
    # lamini SDK docs.
    llm = LLM(
        name="lamini-instruct",
        config={"production": {"key": os.environ["LAMINI-KEY"]}},
    )
    user_query_text = Question(question=input)
    result = llm(
        input=user_query_text,
        output_type=Response,
        model_name="lamini/instruct-tuned-2.8b",
    )
    return parse_response(result.response)


def parse_response(string):
    """Return *string* cut off at the first blank line, whitespace-stripped.

    Instruct models often append extra paragraphs; only the first one is
    shown to the user. If there is no blank line, the whole (stripped)
    string is returned.
    """
    break_point = string.find("\n\n")
    if break_point >= 0:
        string = string[:break_point]
    return string.strip()


iface = gr.Interface(fn=lamini, inputs="text", outputs="text")
iface.launch()