from transformers import GPT2LMHeadModel, AutoTokenizer

# Load the Flax checkpoint from the current directory and convert it to PyTorch weights.
model = GPT2LMHeadModel.from_pretrained("./", from_flax=True)
model.save_pretrained("./")

# Re-save the tokenizer so the directory contains a complete, PyTorch-compatible model.
tokenizer = AutoTokenizer.from_pretrained("./")
tokenizer.save_pretrained("./")