from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("ansumanpandey/codgen-finetuned-SQLQueryGeneration")
model = AutoModelForCausalLM.from_pretrained("ansumanpandey/codgen-finetuned-SQLQueryGeneration")

def get_sql(query):
    # Wrap the natural-language request in the prompt format used during fine-tuning.
    input_text = "Query to %s </s>" % query
    features = tokenizer([input_text], return_tensors='pt')

    # Generate up to 70 new tokens for the SQL query.
    output = model.generate(input_ids=features['input_ids'],
                            attention_mask=features['attention_mask'],
                            max_new_tokens=70)

    # Decode the generated tokens, dropping special tokens such as </s>.
    sql_query = tokenizer.decode(output[0], skip_special_tokens=True)
    return sql_query
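
A minimal usage sketch, assuming the model and tokenizer above loaded successfully. The example question is illustrative only and not taken from the model card:

# Illustrative call; any natural-language request can be passed in.
print(get_sql("list the names of all employees who earn more than 50000"))
# Prints the generated SQL string, e.g. a SELECT statement over an assumed employees table.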