tomaszki committed
Commit c5b2dfc
1 Parent(s): d20255b

First app version

Files changed (3):
  1. README.md +1 -1
  2. app.py +36 -0
  3. requirements.txt +4 -0
README.md CHANGED
@@ -1,6 +1,6 @@
  ---
  title: Mail Fixer
- emoji: 🏃
+ emoji: 📧
  colorFrom: yellow
  colorTo: blue
  sdk: streamlit
app.py ADDED
@@ -0,0 +1,36 @@
+ import streamlit as st
+ import torch
+ from transformers import AutoTokenizer, AutoModelForCausalLM, TextStreamer
+
+
+ class StreamlitStreamer(TextStreamer):
+     def on_finalized_text(self, text: str, stream_end: bool = False):
+         st.session_state['new_mail'] += text
+         new_mail.write(st.session_state['new_mail'])
+
+ device = 'cuda' if torch.cuda.is_available() else 'cpu'
+
+ @st.cache_resource
+ def load_model():
+     return AutoModelForCausalLM.from_pretrained(
+         "tomaszki/mail_fixer",
+     ).to(device)
+
+ @st.cache_resource
+ def load_tokenizer():
+     return AutoTokenizer.from_pretrained("facebook/opt-125m")
+
+ model = load_model()
+ tokenizer = load_tokenizer()
+
+ st.title('Mail fixer')
+
+ mail = st.text_area('Enter your mail here')
+ new_mail = st.text('')
+
+ if mail:
+     st.session_state['new_mail'] = ''
+     streamer = StreamlitStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
+     prompt = f'Original email:\n{mail}\nFixed email:\n'
+     tokenized = tokenizer(prompt, return_tensors='pt').to(device)
+     output = model.generate(**tokenized, max_new_tokens=1024, streamer=streamer)
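
For context, a minimal sketch (not part of the commit) of how the TextStreamer hook used above behaves: while model.generate() runs, transformers decodes tokens incrementally and calls on_finalized_text() with each newly finalized chunk, so the Streamlit subclass only needs to append that chunk to session state and re-render the text widget. The PrintStreamer name and the example prompt below are made up, and the base facebook/opt-125m checkpoint stands in for the fine-tuned tomaszki/mail_fixer weights so the snippet runs on its own.

# Illustrative sketch, not part of the commit: PrintStreamer and the prompt are
# made up, and the base facebook/opt-125m checkpoint replaces the fine-tuned weights.
from transformers import AutoTokenizer, AutoModelForCausalLM, TextStreamer

class PrintStreamer(TextStreamer):
    def on_finalized_text(self, text: str, stream_end: bool = False):
        # Called by generate() with each newly decoded chunk of text.
        print(text, end='', flush=True)

tokenizer = AutoTokenizer.from_pretrained('facebook/opt-125m')
model = AutoModelForCausalLM.from_pretrained('facebook/opt-125m')

prompt = 'Original email:\nplz send me teh report asap\nFixed email:\n'
inputs = tokenizer(prompt, return_tensors='pt')
streamer = PrintStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
model.generate(**inputs, max_new_tokens=64, streamer=streamer)
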
requirements.txt ADDED
@@ -0,0 +1,4 @@
+ transformers
+
+ --find-links https://download.pytorch.org/whl/torch_stable.html
+ torch==2.1.0+cpu
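
On the pinned dependency: the --find-links line points pip at the PyTorch wheel index so that torch==2.1.0+cpu resolves to the CPU-only build, which keeps the install small and matches the CPU hardware of a basic Space. streamlit itself is not listed, presumably because the sdk: streamlit entry in README.md lets the Spaces runtime provide it; to try the app locally one would additionally install streamlit and launch it with streamlit run app.py.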