Prakruti_LLM / main.py
from transformers import GPT2Tokenizer, TFGPT2LMHeadModel

# Load the GPT-2 tokenizer and TensorFlow model; GPT-2 has no dedicated pad
# token, so the end-of-sequence token is used for padding.
tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
model = TFGPT2LMHeadModel.from_pretrained('gpt2', pad_token_id=tokenizer.eos_token_id)

def generate_text(inp):
    # Tokenize the prompt into TensorFlow tensors.
    input_ids = tokenizer.encode(inp, return_tensors='tf')
    # Beam search with 5 beams, blocking repeated bigrams and stopping early
    # once all beams are finished.
    beam_output = model.generate(input_ids, max_length=100, num_beams=5,
                                 no_repeat_ngram_size=2, early_stopping=True)
    # Decode the best beam and trim the trailing partial sentence.
    output = tokenizer.decode(beam_output[0], skip_special_tokens=True,
                              clean_up_tokenization_spaces=True)
    return ".".join(output.split(".")[:-1]) + "."