|
from transformers import T5ForConditionalGeneration, TFT5ForConditionalGeneration

# Load the Flax checkpoint in the current directory as a PyTorch model and
# save the PyTorch weights alongside it.
pt_model = T5ForConditionalGeneration.from_pretrained(".", from_flax=True)
pt_model.save_pretrained(".")

# Derive the TensorFlow weights from the freshly saved PyTorch checkpoint
# (TF models are loaded via from_pt; there is no direct Flax-to-TF path).
tf_model = TFT5ForConditionalGeneration.from_pretrained(".", from_pt=True)
tf_model.save_pretrained(".")

exit()
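
After the session above finishes, a quick sanity check is to reload each framework's weights natively from the same directory; this is a minimal sketch, assuming the conversion has written the PyTorch and TensorFlow weight files next to the Flax checkpoint:

from transformers import (
    FlaxT5ForConditionalGeneration,
    T5ForConditionalGeneration,
    TFT5ForConditionalGeneration,
)

# Each load should now succeed without the from_flax/from_pt bridges.
flax_model = FlaxT5ForConditionalGeneration.from_pretrained(".")
pt_model = T5ForConditionalGeneration.from_pretrained(".")
tf_model = TFT5ForConditionalGeneration.from_pretrained(".")
print(type(flax_model).__name__, type(pt_model).__name__, type(tf_model).__name__)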

For a bfloat16 Flax checkpoint, the conversion can also be done with a standalone script that first casts the parameters to float32, then exports the PyTorch weights and prints the logits of both models as a sanity check:

import tempfile

import jax
import numpy as np
import torch
from jax import numpy as jnp
from transformers import AutoTokenizer, FlaxT5ForConditionalGeneration, T5ForConditionalGeneration


def to_f32(t):
    # Cast every bfloat16 leaf of the parameter pytree to float32;
    # leaves that already have another dtype are passed through unchanged.
    return jax.tree_map(lambda x: x.astype(jnp.float32) if x.dtype == jnp.bfloat16 else x, t)
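
# Example of the cast (hypothetical pytree, not from the source): given
#   params = {"w": jnp.ones(2, dtype=jnp.bfloat16), "b": jnp.zeros(2)}
# to_f32(params) returns "w" as float32 and leaves the float32 "b" untouched.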


def main():
    # Re-save the tokenizer so that all tokenizer files are written out
    # next to the model weights.
    tokenizer = AutoTokenizer.from_pretrained("./")
    tokenizer.save_pretrained("./")

    # Save a float32 copy of the bfloat16 Flax model to a temporary directory.
    tmp = tempfile.mkdtemp()
    flax_model = FlaxT5ForConditionalGeneration.from_pretrained("./")
    flax_model.params = to_f32(flax_model.params)
    flax_model.save_pretrained(tmp)

    # Convert the float32 Flax checkpoint to PyTorch and save the weights
    # in the current directory, keeping the existing config file.
    pt_model = T5ForConditionalGeneration.from_pretrained(tmp, from_flax=True)
    pt_model.save_pretrained("./", save_config=False)

    # Sanity check: run the same dummy batch through both models and print
    # the logits. T5 is an encoder-decoder model, so decoder_input_ids must
    # be supplied explicitly or the forward pass raises an error.
    input_ids = np.asarray(2 * [128 * [0]], dtype=np.int32)
    input_ids_pt = torch.tensor(input_ids)

    logits_pt = pt_model(input_ids_pt, decoder_input_ids=input_ids_pt).logits
    print(logits_pt)
    logits_fx = flax_model(input_ids, decoder_input_ids=input_ids).logits
    print(logits_fx)


if __name__ == "__main__":
    main()
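
Rather than comparing the two printed tensors by eye, the check can be made explicit with np.allclose; this is a minimal sketch, assuming logits_pt and logits_fx from the script above are in scope (the 1e-3 tolerance is an assumption, not a value from the source):

# Numerical comparison of the PyTorch and Flax logits; detach() takes the
# PyTorch tensor out of the autograd graph before the NumPy conversion.
match = np.allclose(
    logits_pt.detach().numpy(),
    np.asarray(logits_fx),
    atol=1e-3,  # assumed tolerance, not from the source
)
print(f"PyTorch and Flax logits match: {match}")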