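"""Convert the Flax checkpoint (flax_model.msgpack) in the current directory into
PyTorch weights for a RoBERTa model and save them back to the same directory."""
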
from transformers.modeling_flax_pytorch_utils import load_flax_checkpoint_in_pytorch_model
from transformers import RobertaConfig, RobertaModel

# Load the model configuration (config.json) from the current directory.
config = RobertaConfig.from_pretrained("./")

# Instantiate a PyTorch RoBERTa model with that configuration (random weights for now).
model = RobertaModel(config)

# Copy the Flax weights from flax_model.msgpack into the PyTorch model.
load_flax_checkpoint_in_pytorch_model(model, "./flax_model.msgpack")

# Write the PyTorch weights (pytorch_model.bin) and config back to the current directory.
model.save_pretrained("./")