Text2Text Generation
Transformers
PyTorch
Safetensors
English
t5
Inference Endpoints
text-generation-inference
machineteacher committed on
Commit
ffc95d8
1 Parent(s): deb5584

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +1 -1
README.md CHANGED
@@ -70,7 +70,7 @@ from transformers import AutoTokenizer, T5ForConditionalGeneration
70
 
71
  tokenizer = AutoTokenizer.from_pretrained("grammarly/coedit-xxl")
72
  model = T5ForConditionalGeneration.from_pretrained("grammarly/coedit-xxl")
73
- input_text = 'Fix grammatical errors in this sentence: New kinds of vehicles will be invented with new technology than today.'
74
  input_ids = tokenizer(input_text, return_tensors="pt").input_ids
75
  outputs = model.generate(input_ids, max_length=256)
76
  edited_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
 
70
 
71
  tokenizer = AutoTokenizer.from_pretrained("grammarly/coedit-xxl")
72
  model = T5ForConditionalGeneration.from_pretrained("grammarly/coedit-xxl")
73
+ input_text = 'Fix grammatical errors in this sentence: When I grow up, I start to understand what he said is quite right.'
74
  input_ids = tokenizer(input_text, return_tensors="pt").input_ids
75
  outputs = model.generate(input_ids, max_length=256)
76
  edited_text = tokenizer.decode(outputs[0], skip_special_tokens=True)