ZJU-Fangyin committed on
Commit
5d63c09
1 Parent(s): 2a45f64

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +2 -3
README.md CHANGED
@@ -28,14 +28,13 @@ Molecule generation example:
28
  >>> # beam search
29
  >>> molecules = model.generate(input_ids=sf_input["input_ids"],
30
  attention_mask=sf_input["attention_mask"],
31
- max_length=20,
32
  min_length=5,
33
  num_return_sequences=5,
34
  num_beams=5,
35
  past_prompt=None)
36
  >>> sf_output = [tokenizer.decode(g, skip_special_tokens=True, clean_up_tokenization_spaces=True).replace(" ","") for g in molecules]
37
- ['[C][=C][C][=C][C][=C][Ring1][=Branch1]', '[C][=C][C][=C][C][=C][C][=C][Ring1][=Branch1]', '[C][=C][C][=C][C][=C][Ring1][=Branch1][C@H1][C][=C][C][=C][C][=C][Ring1][=Branch1]', '[C][=C][C][=C][C][=C][Ring1][=Branch1][C][=C][C][=C][C][=C][Ring1][=Branch1]
38
- ', '[C][=C][C][=C][C][=C][Ring1][=Branch1][C@H1][=C][C][=C][Ring1][=Branch1]']
39
  ```
40
 
41
 
 
28
  >>> # beam search
29
  >>> molecules = model.generate(input_ids=sf_input["input_ids"],
30
  attention_mask=sf_input["attention_mask"],
31
+ max_length=15,
32
  min_length=5,
33
  num_return_sequences=5,
34
  num_beams=5,
35
  past_prompt=None)
36
  >>> sf_output = [tokenizer.decode(g, skip_special_tokens=True, clean_up_tokenization_spaces=True).replace(" ","") for g in molecules]
37
+ ['[C][=C][C][=C][C][=C][Ring1][=Branch1]', '[C][=C][C][=C][C][=C][C][=C][Ring1][=Branch1]', '[C][=C][C][=C][C][=C][Ring1][=Branch1][C][=C][C][=C]', '[C][=C][C][=C][C][=C][Ring1][=Branch1][C@H1][C][=C][C]', '[C][=C][C][=C][C][=C][Ring1][=Branch1][C@H1][=C][C][=C]']
 
38
  ```
39
 
40