akirus committed
Commit 771bb0c · verified · 1 Parent(s): 09c7372

Update README.md

Files changed (1)
  1. README.md +2 -1
README.md CHANGED
@@ -50,12 +50,13 @@ This version of the Google T5-Base model has been fine-tuned on a bilingual data
 `"translate Lezghian to Russian: "` - Lez-Ru
 
 ## How to Get Started with the Model
+
+```python
 from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
 
 model = AutoModelForSeq2SeqLM.from_pretrained("leks-forever/mt5-base")
 tokenizer = AutoTokenizer.from_pretrained("leks-forever/mt5-base")
 
-```python
 def predict(text, prefix, a=32, b=3, max_input_length=1024, num_beams=1, **kwargs):
     inputs = tokenizer(prefix + text, return_tensors='pt', padding=True, truncation=True, max_length=max_input_length)
     result = model.generate(
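
The hunk ends inside the `model.generate(` call, so the rest of the README's `predict` helper is not visible in this commit. For orientation only, here is a minimal, self-contained usage sketch that assumes nothing beyond what the diff shows (the `leks-forever/mt5-base` checkpoint and the `"translate Lezghian to Russian: "` prefix); the generation arguments (`max_new_tokens`, `num_beams`) and the placeholder input sentence are illustrative assumptions, not values taken from the README.

```python
# Hypothetical usage sketch; not part of this commit. The checkpoint name and
# task prefix come from the diff above; generation settings are assumed defaults.
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

model = AutoModelForSeq2SeqLM.from_pretrained("leks-forever/mt5-base")
tokenizer = AutoTokenizer.from_pretrained("leks-forever/mt5-base")

prefix = "translate Lezghian to Russian: "
text = "..."  # placeholder for a Lezghian source sentence

# Tokenize with the same options that appear in the README's predict() signature.
inputs = tokenizer(prefix + text, return_tensors="pt", padding=True,
                   truncation=True, max_length=1024)

# Generate and decode the translation; max_new_tokens/num_beams are assumed values.
output_ids = model.generate(**inputs, max_new_tokens=128, num_beams=1)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```

For GPU inference, moving `model` and the tokenized `inputs` to the same device before calling `generate` is the only change needed.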