Spaces:
Runtime error
Runtime error
File size: 2,701 Bytes
1359fe4 df767ad 1359fe4 17f4e86 df767ad 17f4e86 374191a 17f4e86 df767ad 91cceaf 17353e7 df767ad 6fa365b df767ad 646f8ee 8db772b df767ad 6fa365b df767ad 6fa365b df767ad 6fa365b df767ad 0875161 df767ad 6fa365b df767ad 374191a df767ad 8db772b |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 |
import streamlit as st
import torch
import transformers
from transformers import AutoTokenizer, AutoModelWithLMHead
# Select GPU when available; all tensors and the model are moved to this device.
device=torch.device("cuda" if torch.cuda.is_available() else "cpu")
# device=torch.device("cpu")
# Tokenizer for GPT-Neo 125M — must match the tokenizer the checkpoint was fine-tuned with.
tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neo-125M")
# model=torch.load("Gpt_neo_Epoch_10_Loss_031_data_5000.pth",map_location=torch.device('cpu'))
# Fix the RNG so sampled generations are reproducible across app reruns.
torch.manual_seed(0)
# NOTE(review): torch.load on a full pickled model executes arbitrary code from the
# checkpoint file — safe only because this .pth ships with the app. Prefer saving/loading
# a state_dict (or torch.load(..., weights_only=True)) if the file could ever be untrusted.
model=torch.load("Gpt_neo_Epoch_10_Loss_031_data_5000.pth",map_location=device)
def predict_query(input_sentence, max_len=40, temp=0.7):
    """Generate SQL query suggestions for a natural-language sentence.

    Tokenizes *input_sentence*, samples 5 candidate continuations from the
    fine-tuned GPT-Neo model, and strips everything up to the "<|sep|>"
    marker that separates the prompt from the generated SQL.

    Args:
        input_sentence: Natural-language description of the desired query.
        max_len: Maximum total length (prompt + generation) in tokens.
        temp: Sampling temperature; higher values give more varied output.

    Returns:
        List of 5 decoded suggestion strings.
    """
    sep_marker = "<|sep|>"
    predictions = []
    input_ids = tokenizer(input_sentence, return_tensors='pt')['input_ids'].to(device)
    # generate() returns tensors on the model's device already; no extra .to() needed.
    outputs = model.generate(
        input_ids,
        max_length=max_len,
        do_sample=True,
        top_p=0.95,
        temperature=temp,
        no_repeat_ngram_size=3,
        num_return_sequences=5,
    )
    for out_ids in outputs:
        decoded = tokenizer.decode(out_ids, skip_special_tokens=True)
        # BUG FIX: the original did `find(...) + 7` unconditionally, so a missing
        # marker (find() == -1) silently chopped the first 6 characters of the
        # output. Fall back to the full decoded text when the marker is absent.
        # NOTE(review): if "<|sep|>" is registered as a special token,
        # skip_special_tokens=True would strip it and this branch always fires —
        # confirm against the fine-tuning setup.
        sep_idx = decoded.find(sep_marker)
        suggestion = decoded[sep_idx + len(sep_marker):] if sep_idx != -1 else decoded
        print("Suggestion: ", suggestion)
        predictions.append(suggestion)
    return predictions
# option = st.selectbox(
#     'Please Select option',
#     ('Predictive writing',"None"),index=1)

# --- Page header ---
st.title("Text2SQL")
st.write('# Generate SQL Query with Natural Language sentence')
st.markdown("Creator: [Pranav Kushare] (https://github.com/Pranav082001)")

# --- Sidebar: generation hyperparameters ---
st.sidebar.markdown(
'''
## Select Hyperparameters
''')
# Maximum total token length (prompt + generation) passed to model.generate().
max_len = st.sidebar.slider(label='Output Size', min_value=1, max_value=150, value=40, step=1)
# samples = st.sidebar.slider(label='Number of Samples', min_value=1, max_value=50, value=10, step=1)
# Sampling temperature; higher values produce more varied (less greedy) SQL.
temp = st.sidebar.slider(label='Temperature (Creativity)', min_value=0.0, max_value=2.0, value=0.7, step=0.1)
# temp = st.sidebar.slider(label='Temperature', min_value=0.1, max_value=1.0, value=5.0, step=0.05)
# do_sample=st.sidebar.checkbox("do_sample")
# max_len=st.slider("max_len",1,100,None,1,key="max_len")
# top_k=st.slider("top_k",1,50,None,1)
# do_sample=st.checkbox("do_sample")
# print(max_len)

# --- Main input area ---
sentence = st.text_area('Input your sentence here:')
st.markdown('Example: "Find Average Salary of Employees"')
Enter=st.button("Generate")
clear=st.button("Clear")

# NOTE(review): the "Clear" button only prints and emits a blank markdown line —
# it does not reset the text area (that would need a widget key + session_state).
if clear:
    print(clear)
    st.markdown(' ')

# On "Generate": re-seed for reproducible sampling, run the model, and render
# each of the 5 suggestions.
if Enter:
    st.header("Output-")
    print("Generating predictions......\n\n")
    # out=generate(sentence,max_len,top_k,do_sample)
    torch.manual_seed(0)
    out=predict_query(sentence,max_len,temp)
    # NOTE(review): the loop variable shadows the list `out`; harmless here since
    # the list is not used after the loop, but worth renaming.
    for i,out in enumerate(out):
        st.markdown(f"Query {i} :{out}")
|