File size: 136 Bytes
f98be5f
 
 
1
2
3
"""Convert a Flax RoBERTa masked-LM checkpoint to PyTorch format.

Loads the Flax weights found in a checkpoint directory and re-saves them
in the same directory as a PyTorch checkpoint (pytorch_model.bin/config).
"""
import sys

from transformers import RobertaForMaskedLM


def convert_flax_to_pytorch(checkpoint_dir: str = "./") -> None:
    """Load Flax RoBERTa weights from *checkpoint_dir* and save as PyTorch.

    Parameters
    ----------
    checkpoint_dir:
        Directory containing the Flax checkpoint; the converted PyTorch
        files are written back to the same directory. Defaults to the
        current working directory, matching the original script.
    """
    # from_flax=True tells transformers to read the Flax weight file
    # (flax_model.msgpack) instead of a PyTorch state dict.
    model = RobertaForMaskedLM.from_pretrained(checkpoint_dir, from_flax=True)
    model.save_pretrained(checkpoint_dir)


if __name__ == "__main__":
    # Guarded entry point: the conversion no longer fires on import.
    # Optional first CLI argument overrides the checkpoint directory.
    convert_flax_to_pytorch(sys.argv[1] if len(sys.argv) > 1 else "./")