hhhhzy committed

Commit: da747dc
Parent: 52c9c23

Update README.md

Files changed (1):
  1. README.md (+2 −2)
README.md CHANGED
@@ -123,7 +123,7 @@ The model trained with a p4d.24xlarge instance on aws sagemaker, with the follow
  - weight decay: 0.01
 
  ## Inference example
- '''
+ ```
  from modeling_deltalm import DeltalmForConditionalGeneration # download from https://huggingface.co/hhhhzy/deltalm-base-xlsum/blob/main/modeling_deltalm.py
  from configuration_deltalm import DeltalmConfig # download from https://huggingface.co/hhhhzy/deltalm-base-xlsum/blob/main/configuration_deltalm.py
  from transformers import AutoTokenizer
@@ -136,4 +136,4 @@ inputs = tokenizer(text, max_length=512, return_tensors="pt")
 
  generate_ids = model.generate(inputs["input_ids"], max_length=512)
  tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
- '''
+ ```
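For context, here is the inference example reassembled as one runnable sketch. The lines between the two hunks (loading the model and tokenizer, and defining the input `text`) are not shown in this diff, so the `from_pretrained` calls and the placeholder `text` below are assumptions based on the standard transformers interface, not the README's exact wording; only the imports, the `inputs = tokenizer(...)` line from the hunk header, and the generate/decode calls appear in the diff itself.

```python
from modeling_deltalm import DeltalmForConditionalGeneration  # download from https://huggingface.co/hhhhzy/deltalm-base-xlsum/blob/main/modeling_deltalm.py
from configuration_deltalm import DeltalmConfig  # download from https://huggingface.co/hhhhzy/deltalm-base-xlsum/blob/main/configuration_deltalm.py
from transformers import AutoTokenizer

# Assumed: the checkpoint and tokenizer load from the Hub repo via the standard
# from_pretrained interface. The diff elides these lines, so this is a sketch.
model = DeltalmForConditionalGeneration.from_pretrained("hhhhzy/deltalm-base-xlsum")
tokenizer = AutoTokenizer.from_pretrained("hhhhzy/deltalm-base-xlsum")

# Placeholder input; the README's actual example text is not shown in the diff.
text = "Your article to summarize goes here."
inputs = tokenizer(text, max_length=512, return_tensors="pt")

# Generate a summary and decode it back to text (these lines appear in the diff).
generate_ids = model.generate(inputs["input_ids"], max_length=512)
summary = tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
print(summary)
```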