# msft-finchat / app.py
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
# Initialize the tokenizer and model from Hugging Face's transformers
tokenizer = AutoTokenizer.from_pretrained("AdaptLLM/finance-chat")
model = AutoModelForCausalLM.from_pretrained("AdaptLLM/finance-chat")
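# Optional (assumption, not part of the original app): on GPU hardware the checkpoint can be
# loaded in half precision to reduce memory use, e.g.
#   model = AutoModelForCausalLM.from_pretrained(
#       "AdaptLLM/finance-chat", torch_dtype=torch.float16, device_map="auto"
#   )
# (requires `import torch` and the `accelerate` package).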
def generate_answer(user_input):
    our_system_prompt = (
        "\nYou are a helpful, respectful and honest assistant. Always answer as helpfully "
        "and factually as possible. If a question does not make sense or is not factually "
        "coherent, explain why instead of answering something incorrect. If you don't know "
        "the answer, please don't share false information.\n\n"
    )
    # AdaptLLM/finance-chat is built on LLaMA-2-Chat, so wrap the turn in its [INST]/<<SYS>> template
    prompt = f"[INST] <<SYS>>{our_system_prompt}<</SYS>>\n\n{user_input} [/INST]"
    # Tokenize the single prompt (no padding needed; the LLaMA tokenizer has no pad token)
    inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=512).to(model.device)
    output = model.generate(
        **inputs,
        max_new_tokens=512,   # budget for the answer itself, independent of prompt length
        do_sample=True,       # sampling must be enabled for temperature to take effect
        temperature=0.7,
        num_return_sequences=1,
    )
    # Decode only the newly generated tokens, not the echoed prompt
    answer_start = inputs["input_ids"].shape[-1]
    predicted_text = tokenizer.decode(output[0][answer_start:], skip_special_tokens=True)
    return predicted_text
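# Illustrative usage (not wired into the UI): calling the function directly returns only the
# model's answer text, e.g.
#   print(generate_answer("What is the difference between a stock and a bond?"))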
# Gradio app interface
iface = gr.Interface(
    fn=generate_answer,
    inputs=gr.Textbox(lines=7, placeholder="Enter your finance question here..."),
    outputs="text",
    title="Finance Expert with AdaptLLM",
    description="Ask questions about trading, financial technology, or business and get a clear, direct answer generated by the AdaptLLM/finance-chat model.",
)
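# Optional (assumption, not in the original app): on CPU-only Spaces generation is slow, so
# enabling Gradio's request queue with iface.queue() before launching helps avoid request timeouts.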
iface.launch()  # share=True is unnecessary on Hugging Face Spaces and is ignored there