Commit d135f3e by VictorSanh (parent: 9709489)

Update readme and doc from the 80b repo

Files changed (1): README.md +2 -2
README.md CHANGED
@@ -96,7 +96,7 @@ inputs = processor(prompts, return_tensors="pt").to(device)
 # inputs = processor(prompts[0], return_tensors="pt").to(device)
 
 # Generation args
-bad_words_ids = tokenizer(["<image>", "<fake_token_around_image>"], add_special_tokens=False).input_ids
+bad_words_ids = processor.tokenizer(["<image>", "<fake_token_around_image>"], add_special_tokens=False).input_ids
 
 generated_ids = model.generate(**inputs, bad_words_ids=bad_words_ids, max_length=100)
 generated_text = processor.batch_decode(generated_ids, skip_special_tokens=True)
@@ -141,7 +141,7 @@ inputs = processor(prompts, add_end_of_utterance_token=False, return_tensors="pt
 
 # Generation args
 exit_condition = processor.tokenizer("<end_of_utterance>", add_special_tokens=False).input_ids
-bad_words_ids = tokenizer(["<image>", "<fake_token_around_image>"], add_special_tokens=False).input_ids
+bad_words_ids = processor.tokenizer(["<image>", "<fake_token_around_image>"], add_special_tokens=False).input_ids
 
 generated_ids = model.generate(**inputs, eos_token_id=exit_condition, bad_words_ids=bad_words_ids, max_length=100)
 generated_text = processor.batch_decode(generated_ids, skip_special_tokens=True)
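
Both hunks make the same fix: the README snippets never define a bare `tokenizer`, so the old `bad_words_ids` line raised a NameError when copied as-is; the tokenizer lives on the IDEFICS processor. Below is a minimal, self-contained sketch of how the corrected line fits into the README's generation example. The checkpoint name and image URL are illustrative assumptions, not part of this commit; only the `processor.tokenizer` call on the `bad_words_ids` line is what the commit changes.

```python
# Minimal sketch of the corrected snippet in context.
# Assumptions (not from this commit): the "HuggingFaceM4/idefics-9b"
# checkpoint name and the placeholder image URL.
import torch
from transformers import AutoProcessor, IdeficsForVisionText2Text

device = "cuda" if torch.cuda.is_available() else "cpu"
checkpoint = "HuggingFaceM4/idefics-9b"  # assumed checkpoint

model = IdeficsForVisionText2Text.from_pretrained(
    checkpoint, torch_dtype=torch.bfloat16
).to(device)
processor = AutoProcessor.from_pretrained(checkpoint)

# IDEFICS prompts interleave text with image URLs (or PIL images).
prompts = [
    [
        "https://example.com/image.jpg",  # hypothetical image URL
        "This image shows",
    ],
]
inputs = processor(prompts, return_tensors="pt").to(device)

# The fix: call the tokenizer through the processor instead of an
# undefined top-level `tokenizer` name.
bad_words_ids = processor.tokenizer(
    ["<image>", "<fake_token_around_image>"], add_special_tokens=False
).input_ids

generated_ids = model.generate(**inputs, bad_words_ids=bad_words_ids, max_length=100)
generated_text = processor.batch_decode(generated_ids, skip_special_tokens=True)
print(generated_text[0])
```

Passing these token ids as `bad_words_ids` keeps `generate` from emitting the image placeholder tokens `<image>` and `<fake_token_around_image>` in the decoded text, which is why both README examples build the list the same way.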