Modfiededition committed on
Commit 026d9d8
1 Parent: 688a116

Update app.py

Files changed (1)
app.py +15 -8
app.py CHANGED
@@ -2,23 +2,30 @@ import streamlit as st
 import transformers
 import tensorflow
 
-from transformers import pipeline
 
-# Replace this with your own checkpoint
-model_checkpoint = "Modfiededition/t5-base-fine-tuned-on-jfleg"
+from transformers import AutoTokenizer
+from transformers import TFAutoModelForSeq2SeqLM
 
+model_checkpoint = "Modfiededition/t5-base-fine-tuned-on-jfleg"
+tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
 
+@st.cache
 def load_model(model_name):
-    translator= pipeline("text2text-generation", model=model_checkpoint)
-    return translator
+    model = TFAutoModelForSeq2SeqLM.from_pretrained(model_name)
+    return model
 
-translator= load_model(model_checkpoint)
+model= load_model(model_checkpoint)
 
 default_value = "Write your text here!"
 #prompts
 st.title("Writing Assistant for you 🦄")
 
 sent = st.text_area("Text", default_value, height = 275)
-generated_sequences = translator(sent)
 
-st.write(generated_sequences[-1])
+inputs = tokenizer("Grammar: "+sent,return_tensors="tf")
+
+output_ids = model.generate(inputs["input_ids"]).numpy()[0][1:-1]
+
+generated_sequences = tokenizer.decode(output_ids)
+
+st.write(generated_sequences)
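
For reference, this is app.py as it reads after the commit, assembled from the unchanged and added lines of the diff above and lightly reformatted (PEP 8 spacing). The comments are editorial additions, not part of the committed file:

import streamlit as st
import transformers
import tensorflow

from transformers import AutoTokenizer
from transformers import TFAutoModelForSeq2SeqLM

# Grammar-correction checkpoint fine-tuned on the JFLEG dataset
model_checkpoint = "Modfiededition/t5-base-fine-tuned-on-jfleg"
tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)

# Cache the model so it is loaded once, not on every Streamlit rerun
@st.cache
def load_model(model_name):
    model = TFAutoModelForSeq2SeqLM.from_pretrained(model_name)
    return model

model = load_model(model_checkpoint)

default_value = "Write your text here!"
# prompts
st.title("Writing Assistant for you 🦄")

sent = st.text_area("Text", default_value, height=275)

# The app prepends a "Grammar: " task prefix to the user's text
inputs = tokenizer("Grammar: " + sent, return_tensors="tf")

# Take the first (only) generated sequence and slice off the leading
# pad/decoder-start token and the trailing end-of-sequence token
output_ids = model.generate(inputs["input_ids"]).numpy()[0][1:-1]

generated_sequences = tokenizer.decode(output_ids)

st.write(generated_sequences)

Two side notes on the committed code, not stated in the commit itself: only the model is cached, while the tokenizer is rebuilt on every rerun (cheap, but it could live inside the same cached loader); and the [1:-1] slice removes the special tokens by position, whereas tokenizer.decode(output_ids, skip_special_tokens=True) on the unsliced ids would do the same without assuming the output layout. Depending on the Streamlit version, the legacy @st.cache decorator may also warn about hashing or mutating the returned model; allow_output_mutation=True was the usual workaround, and newer releases replace st.cache with st.cache_resource.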