Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -1,36 +1,40 @@
|
|
1 |
import streamlit as st
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2 |
|
3 |
-
|
4 |
-
|
5 |
-
|
|
|
6 |
|
7 |
-
@st.cache # only run
|
8 |
-
def
|
9 |
-
#for reproducability
|
10 |
-
#SEED = 12
|
11 |
-
|
12 |
-
from transformers import TFGPT2LMHeadModel, GPT2Tokenizer
|
13 |
tokenizer = GPT2Tokenizer.from_pretrained("gpt2-medium")
|
14 |
GPT2 = TFGPT2LMHeadModel.from_pretrained("gpt2-medium", pad_token_id=tokenizer.eos_token_id)
|
15 |
-
|
16 |
-
return tokenizer, GPT2
|
17 |
-
|
18 |
-
|
19 |
-
tokenizer, GPT2 = download_transformer()
|
20 |
|
21 |
-
|
22 |
-
|
23 |
-
|
24 |
-
|
25 |
-
|
26 |
-
|
27 |
-
|
28 |
-
|
|
|
|
|
|
|
29 |
do_sample = True,
|
30 |
-
max_length =
|
31 |
top_p = 0.8,
|
32 |
top_k = 0)
|
|
|
|
|
33 |
|
34 |
-
|
35 |
-
st.write('Clicked!')
|
36 |
-
st.write(words, num_words)
|
|
|
# DeepWords — Streamlit front-end that asks GPT-2 (medium) to continue a
# user-supplied sentence.  This header sets up the page; the cached model
# loader and the input form follow below.
import streamlit as st
import tensorflow as tf
from transformers import TFGPT2LMHeadModel, GPT2Tokenizer

st.title('DeepWords')
st.text('Still under Construction.')
st.text('Tip: Try writing a sentence and making the model predict final word.')
16 |
+
@st.cache(allow_output_mutation=True)  # cache so the weights download only once per session
def get_model():
    """Load the GPT-2 medium tokenizer and TF model pair.

    Returns:
        (model, tokenizer): the ``TFGPT2LMHeadModel`` and matching
        ``GPT2Tokenizer``; padding is mapped to the EOS token so
        ``generate`` works without a dedicated pad token.
    """
    gpt2_tokenizer = GPT2Tokenizer.from_pretrained("gpt2-medium")
    gpt2_model = TFGPT2LMHeadModel.from_pretrained(
        "gpt2-medium",
        pad_token_id=gpt2_tokenizer.eos_token_id,
    )
    return gpt2_model, gpt2_tokenizer
|
|
|
|
|
|
|
|
|
# Fixed RNG seed for reproducible sampling.  The previous revision only had
# this as a commented-out `#SEED = 12`, so `tf.random.set_seed(SEED)` below
# raised NameError at runtime (the Space's "Runtime error").
SEED = 12

# Bind the cached model/tokenizer.  Without this call, `GPT2` and
# `tokenizer` are undefined names — the second cause of the runtime crash
# (the old revision called its loader; this revision defined but never
# invoked `get_model`).
GPT2, tokenizer = get_model()

c = 5  # default word budget; overwritten by the form's number input
with st.form(key='my_form'):
    prompt = st.text_input('Enter sentence:', '')
    c = st.number_input('Enter Number of words: ', 1)
    submit_button = st.form_submit_button(label='Submit')
    if submit_button:
        tf.random.set_seed(SEED)
        input_ids = tokenizer.encode(prompt, return_tensors='tf')

        # NOTE(review): `max_length` counts the prompt's tokens as well, so
        # the continuation can be shorter than `c` tokens — confirm intent.
        sample_output = GPT2.generate(
            input_ids,
            do_sample=True,
            max_length=c,
            top_p=0.8,   # nucleus sampling
            top_k=0)     # disable top-k filtering

        st.write(tokenizer.decode(sample_output[0], skip_special_tokens=True), '...')
|
|
|