Simon Salmon committed on
Commit
5097da2
1 Parent(s): 9a2be8c

Create app.py

Files changed (1)
  1. app.py +53 -0
app.py ADDED
@@ -0,0 +1,53 @@
import streamlit as st
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM


st.title('Informal to Formal')
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

st.text('''How To Make Prompt:

informal english: space is huge and needs to be explored.
Translated into the Style of Abraham Lincoln: space awaits traversal, a new world whose boundaries are endless.
Translated into the Style of Abraham Lincoln: space is a boundless expanse, a vast virgin domain awaiting exploration.

informal english: i am very ready to do that just that.
Translated into the Style of Abraham Lincoln: you can assure yourself of my readiness to work toward this end.
Translated into the Style of Abraham Lincoln: please be assured that i am most ready to undertake this laborious task.

informal english: meteors are much harder to see, because they are only there for a fraction of a second.
Translated into the Style of Abraham Lincoln: meteors are not readily detectable, lasting for mere fractions of a second.

informal english:''')


# Load the GPT-2 tokenizer and the fine-tuned "MrLincoln3" language model.
tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("BigSalmon/MrLincoln3")
model.to(device)
model.eval()

with st.form(key='my_form'):
    prompt = st.text_area(label='Enter sentence')
    submit_button = st.form_submit_button(label='Submit')

if submit_button:
    with torch.no_grad():
        # Encode the prompt and run a single forward pass through the model.
        text = tokenizer.encode(prompt)
        myinput = torch.tensor([text]).to(device)
        logits, past_key_values = model(myinput, past_key_values=None, return_dict=False)
        # Distribution over the next token (last position in the sequence).
        logits = logits[0, -1]
        probabilities = torch.nn.functional.softmax(logits, dim=-1)
        # Keep the 60 most likely next tokens and decode each to a string.
        best_logits, best_indices = logits.topk(60)
        best_words = [tokenizer.decode([idx.item()]) for idx in best_indices]
        text.append(best_indices[0].item())  # greedily extend the encoded prompt
        best_probabilities = probabilities[best_indices].tolist()
        st.write(best_words)
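The committed app only lists the top-60 candidate next tokens for the prompt. As a rough sketch of how the same checkpoint could instead be asked for a full continuation, the standard transformers generate API could be used as below; the example prompt, the max_new_tokens budget, and the sampling settings are illustrative assumptions, not part of this commit.

# Hypothetical extension (not in the commit): produce a full continuation
# with the same tokenizer/model instead of listing candidate next tokens.
prompt = ("informal english: space is huge and needs to be explored.\n"
          "Translated into the Style of Abraham Lincoln:")
input_ids = tokenizer.encode(prompt, return_tensors="pt").to(device)
output_ids = model.generate(
    input_ids,
    max_new_tokens=40,               # assumed length budget
    do_sample=True,                  # sample rather than decode greedily
    top_k=60,                        # mirrors the app's top-60 cutoff
    pad_token_id=tokenizer.eos_token_id,
)
st.write(tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True))

Once streamlit, torch, and transformers are installed, the app itself runs locally with streamlit run app.py.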