import streamlit as st

from transformers import pipeline

# Replace this with your own checkpoint
model_checkpoint = "huggingface/models/t5-base-fine-tuned-on-jfleg"


@st.cache(allow_output_mutation=True)
def load_model(model_name):
    # Load the text2text-generation pipeline once and cache it across reruns
    translator = pipeline("text2text-generation", model=model_name)
    return translator


translator = load_model(model_checkpoint)

default_value = "Write your text here!"
# Prompts
st.title("Writing Assistant for you 🦄")

sent = st.text_area("Text", default_value, height=275)
generated_sequences = translator(sent)

# The pipeline returns a list of dicts; display the generated text itself
st.write(generated_sequences[-1]["generated_text"])
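
# Usage sketch (assumption: this script is saved as app.py and both
# streamlit and transformers are installed, e.g. `pip install streamlit transformers`):
#
#   streamlit run app.py
#
# Type a sentence into the text area and the text generated by the
# checkpoint configured above is rendered below it.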