from transformers import AutoTokenizer
from retnet.modeling_retnet import RetNetForCausalLM

# Load the RetNet weights from the current directory (the model repo root)
model = RetNetForCausalLM.from_pretrained("./")

# Reuse the GPT-2 tokenizer, raising its maximum sequence length
tokenizer = AutoTokenizer.from_pretrained("gpt2")
tokenizer.model_max_length = 16384
# GPT-2 defines no dedicated pad token, so point pad/unk/bos at the eos token
tokenizer.pad_token = tokenizer.eos_token
tokenizer.unk_token = tokenizer.eos_token
tokenizer.bos_token = tokenizer.eos_token

inputs = tokenizer("Hello, my dog is cute and ", return_tensors="pt")

# Generate up to 50 tokens in total (prompt included); adjust max_length as needed
generation_output = model.generate(**inputs, max_length=50)
output = tokenizer.decode(generation_output[0], skip_special_tokens=True)

print(output)
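
# Optional: a minimal sketch of batched generation, which is what the pad_token
# assignment above enables. The prompts here are illustrative, and this assumes
# RetNetForCausalLM follows the standard transformers GenerationMixin interface,
# as the single-prompt generate() call above suggests.
tokenizer.padding_side = "left"  # decoder-only models should be left-padded for generation
prompts = ["Hello, my dog is cute and ", "The weather today is "]
batch = tokenizer(prompts, return_tensors="pt", padding=True)
batch_output = model.generate(**batch, max_new_tokens=30)
for seq in batch_output:
    print(tokenizer.decode(seq, skip_special_tokens=True))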