from transformers import XLMRobertaConfig, XLMRobertaForMaskedLM

# Load the config and the Flax checkpoint from the current directory,
# convert the weights to PyTorch, then save the PyTorch checkpoint in place.
config = XLMRobertaConfig.from_pretrained("./")
model = XLMRobertaForMaskedLM.from_pretrained("./", config=config, from_flax=True)
model.save_pretrained("./")