import streamlit as st
from PIL import Image
from transformers import pipeline

# T5-base checkpoint fine-tuned for grammatical error correction on the JFLEG dataset.
model_checkpoint = "Modfiededition/t5-base-fine-tuned-on-jfleg"


# Cache the pipeline so the model is loaded only once per session.
@st.cache(allow_output_mutation=True, suppress_st_warning=True)
def load_model():
    return pipeline("text2text-generation", model=model_checkpoint)


model = load_model()

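# Note: on newer Streamlit releases st.cache is deprecated; the rough
# equivalent there is st.cache_resource (an assumption about the Streamlit
# version in use, adjust to match your environment):
#
#     @st.cache_resource
#     def load_model():
#         return pipeline("text2text-generation", model=model_checkpoint)
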
# Prompts and input widgets
st.title("Writing Assistant for You 🦄")
image = Image.open('grammar.jpg').resize((1000, 300))
st.image(image)
st.subheader("Some examples:")
example_1 = st.button("I am write on AI")
example_2 = st.button("This sentence has, bads grammar mistake!")

textbox = st.text_area('Write your text in this box:', '', height=100, max_chars=1000)

button = st.button('Correct grammar mistakes')

# Output
if example_1:
    with st.spinner('In progress...'):
        output_text = model("I am write on AI")[0]["generated_text"]
    st.markdown("**" + output_text + "**")

if example_2:
    with st.spinner('In progress...'):
        output_text = model("This sentence has, bads grammar mistake!")[0]["generated_text"]
    st.markdown("**" + output_text + "**")

if button:
    if textbox.strip():
        with st.spinner('In progress...'):
            output_text = model(textbox)[0]["generated_text"]
        st.markdown("**" + output_text + "**")
    else:
        st.warning("Please enter some text to correct.")
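
# --- Illustrative sketch only, not wired into the UI above ---
# The text2text-generation pipeline forwards generation keyword arguments to
# model.generate(), so beam search and an output-length cap can be requested
# per call. The parameter values below are assumptions for illustration, not
# tuned defaults.
def correct_with_beam_search(text: str) -> str:
    return model(text, num_beams=5, max_length=128)[0]["generated_text"]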