# Convert a Flax checkpoint in the current directory to PyTorch weights.
from transformers import RobertaForMaskedLM

# Load the Flax weights and convert them to PyTorch.
model = RobertaForMaskedLM.from_pretrained("./", from_flax=True)
# Save the converted model as a PyTorch checkpoint alongside the Flax files.
model.save_pretrained("./")