File size: 229 Bytes
05f5321
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
"""Convert a Flax GPT-2 checkpoint to PyTorch format, in place.

Loads the Flax weights from a checkpoint directory via ``from_flax=True``,
re-saves them as a PyTorch checkpoint, and re-saves the tokenizer alongside
so the directory becomes a complete PyTorch-loadable checkpoint.
"""
from transformers import GPT2LMHeadModel, AutoTokenizer


def main(model_dir: str = "./") -> None:
    """Convert the checkpoint in *model_dir* from Flax to PyTorch.

    Args:
        model_dir: Directory containing the Flax checkpoint; the converted
            PyTorch weights and tokenizer files are written back to the
            same directory. Defaults to the current directory, matching
            the original script's behavior.
    """
    # from_flax=True tells transformers to read the Flax/JAX weight file
    # and convert it to PyTorch tensors on load.
    model = GPT2LMHeadModel.from_pretrained(model_dir, from_flax=True)
    model.save_pretrained(model_dir)

    # Re-save the tokenizer so the directory carries everything needed
    # to load the model with plain PyTorch from_pretrained() later.
    tokenizer = AutoTokenizer.from_pretrained(model_dir)
    tokenizer.save_pretrained(model_dir)


# Guard the entry point so importing this module does not trigger the
# (slow, filesystem-writing) conversion as an import side effect.
if __name__ == "__main__":
    main()