nthngdy commited on
Commit
4bc16c4
1 Parent(s): 0f3ad40

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +1 -1
README.md CHANGED
@@ -30,7 +30,7 @@ Our results show that MANTa-LM only slightly degrades the performance of a T5 eq
 ```python
 from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
 
-tokenizer = AutoTokenizer.from_pretrained("google/byt5-base")
+tokenizer = AutoTokenizer.from_pretrained("almanach/manta-lm-small", trust_remote_code=True)
 manta_model = AutoModelForSeq2SeqLM.from_pretrained("almanach/manta-lm-small", trust_remote_code=True)
 
 tokens = tokenizer("The name of the capital of France is <extra_id_0> and it is a very big city.", return_tensors="pt")