|
import streamlit as st |
|
from transformers import T5ForConditionalGeneration, T5Tokenizer |
|
|
|
|
|
model_name = "google/flan-t5-large"


@st.cache_resource
def load_model(name: str):
    """Load and cache the seq2seq model and tokenizer for *name*.

    Streamlit re-runs this whole script on every widget interaction, so
    without caching the multi-GB checkpoint would be reloaded from disk
    on each rerun. ``st.cache_resource`` keeps a single shared instance
    alive for the process and returns it on subsequent reruns.
    """
    loaded_model = T5ForConditionalGeneration.from_pretrained(name)
    loaded_tokenizer = T5Tokenizer.from_pretrained(name)
    return loaded_model, loaded_tokenizer


# Module-level names kept identical to the original script so the rest
# of the file (tokenizer(...), model.generate(...)) works unchanged.
model, tokenizer = load_model(model_name)
|
|
|
|
|
# Page header for the chatbot UI.
st.title("AIBUDDY Chatbot")




# Free-form prompt box. Pre-filled with an example instruction so the
# user sees the "task: input" prompt style that FLAN-T5 expects.
input_text = st.text_area("Enter your query:", "Translate English to French: 'Hello, how are you?'")
|
|
|
if st.button("Generate"):
    # Guard against empty / whitespace-only prompts: generating on an
    # empty input wastes a model call and yields a meaningless string.
    prompt = input_text.strip()
    if not prompt:
        st.warning("Please enter a query before generating.")
    else:
        # Tokenize the prompt into model-ready input ids (PyTorch tensors).
        input_ids = tokenizer(prompt, return_tensors="pt").input_ids

        with st.spinner("Generating response..."):
            # Greedy generation (defaults); max_length bounds the decoder
            # output length for this encoder-decoder model.
            output = model.generate(input_ids, max_length=100, num_return_sequences=1)

        # Decode the first (only) returned sequence, dropping special
        # tokens such as <pad> and </s>.
        response = tokenizer.decode(output[0], skip_special_tokens=True)

        st.subheader("Response:")
        st.write(response)
|
|
|
|
|
|