File size: 224 Bytes
cb63820
 
 
 
 
 
1
2
3
4
5
6
7
from transformers import XLMRobertaForMaskedLM, XLMRobertaConfig


def convert_flax_to_pytorch(model_dir: str = "./") -> None:
    """Convert a Flax XLM-RoBERTa masked-LM checkpoint to PyTorch format.

    Reads the model config and Flax weights from ``model_dir`` and writes
    the converted PyTorch weights back into the same directory.

    Args:
        model_dir: Directory containing the config and Flax checkpoint;
            also the destination for the PyTorch weights. Defaults to the
            current directory, matching the original script's behavior.
    """
    config = XLMRobertaConfig.from_pretrained(model_dir)
    # from_flax=True makes transformers load the Flax checkpoint and
    # convert its weights into PyTorch tensors.
    model = XLMRobertaForMaskedLM.from_pretrained(
        model_dir, config=config, from_flax=True
    )
    model.save_pretrained(model_dir)


if __name__ == "__main__":
    convert_flax_to_pytorch()