from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# The tokenizer comes from the base FLAN-T5 model, while the weights come from
# a checkpoint fine-tuned for dialogue summarization.
tokenizerModelName = 'google/flan-t5-base'
instruct_model_name = 'truocpham/flan-dialogue-summary-checkpoint'

tokenizer = AutoTokenizer.from_pretrained(tokenizerModelName)
model = AutoModelForSeq2SeqLM.from_pretrained(instruct_model_name)
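# Optional sketch: on a GPU with limited memory, the same checkpoint can be
# loaded in half precision via a standard transformers argument. This is an
# alternative shown for illustration, not the configuration used above.
# import torch
# model = AutoModelForSeq2SeqLM.from_pretrained(instruct_model_name, torch_dtype=torch.float16)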
def SummarizeThis(Dialogue):
    """Summarize a dialogue using the fine-tuned FLAN-T5 checkpoint."""
    prompt = f"""
Summarize the following conversation in more than 10 lines please.
{Dialogue}
Summary:
"""
    # Tokenize the prompt, generate, and decode the first returned sequence.
    inputs = tokenizer(prompt, return_tensors='pt')
    Summary = tokenizer.decode(
        model.generate(
            inputs["input_ids"],
            max_new_tokens=800,
        )[0],
        skip_special_tokens=True
    )
    return Summary
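# Optional quick check of SummarizeThis before wiring up the UI; the sample
# dialogue below is made up purely for illustration. Uncomment to run it.
# sample_dialogue = (
#     "#Person1#: Have you finished the report for tomorrow's meeting?\n"
#     "#Person2#: Almost. I still need to add the sales figures.\n"
#     "#Person1#: OK, please send it to me tonight so I can review it."
# )
# print(SummarizeThis(sample_dialogue))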
# Build the Gradio application.
import gradio as gr

iface = gr.Interface(fn=SummarizeThis, inputs="text", outputs=["text"], title="Summarization")
iface.launch()
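# Optional launch variants (standard Gradio arguments, shown here as a sketch):
# iface.launch(share=True)               # expose a temporary public link
# iface.launch(server_name="0.0.0.0")    # listen on all interfaces, e.g. inside a container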