from transformers import FlaxRobertaForMaskedLM, RobertaForMaskedLM, AutoTokenizer

import jax
import jax.numpy as jnp


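# Cast bfloat16 leaves of a parameter pytree to float32; other dtypes pass through unchanged.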
def to_f32(t):
    return jax.tree_util.tree_map(
        lambda x: x.astype(jnp.float32) if x.dtype == jnp.bfloat16 else x, t
    )


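# Load the Flax checkpoint from the current directory, upcast its parameters
# to float32, and overwrite the checkpoint with the upcast weights.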
model = FlaxRobertaForMaskedLM.from_pretrained("./")
model.params = to_f32(model.params)
model.save_pretrained("./")

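# Convert the float32 Flax weights into a PyTorch checkpoint in the same directory.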
model_pt = RobertaForMaskedLM.from_pretrained("./", from_flax=True)
model_pt.save_pretrained("./")

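# Re-save the tokenizer alongside the converted weights.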
tokenizer = AutoTokenizer.from_pretrained("./")
tokenizer.save_pretrained("./")
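
# Optional sanity check (an added sketch, not part of the original script):
# run one masked input through both checkpoints and confirm the logits agree
# after the bfloat16 -> float32 upcast and the Flax -> PyTorch conversion.
import numpy as np
import torch

inputs = tokenizer("Hello <mask>.", return_tensors="np")
flax_logits = model(**inputs).logits
with torch.no_grad():
    pt_logits = model_pt(**{k: torch.tensor(v) for k, v in inputs.items()}).logits
assert np.allclose(np.asarray(flax_logits), pt_logits.numpy(), atol=1e-3)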