IlPakoZ committed
Commit 5cd2f4f · 1 parent ab04da1

Minor fixes

Files changed (2)
  1. README.md +0 -1
  2. modeling_m5_encoder.py +2 -1
README.md CHANGED
@@ -46,7 +46,6 @@ Inputs require SELFIES tokenization **and** a precomputed distance matrix
 (`relative_position`). Use the helper bundled in the repo:
 
 ```python
-
 tokenizer = AutoTokenizer.from_pretrained("IlPakoZ/m5-encoder", trust_remote_code=True)
 
 smiles = "CCO"
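
For context, the README snippet is truncated at `smiles = "CCO"`. A minimal sketch of the flow it implies, assuming the `selfies` package for the SMILES-to-SELFIES step and that the remote tokenizer accepts a SELFIES string; the repo's bundled distance-matrix helper is not shown in this hunk, so it is only referenced in a comment:

```python
import selfies as sf
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("IlPakoZ/m5-encoder", trust_remote_code=True)

smiles = "CCO"                    # ethanol
selfies_str = sf.encoder(smiles)  # -> "[C][C][O]"

# Tokenize the SELFIES string; the model additionally expects a
# precomputed distance matrix (`relative_position`) produced by the
# helper bundled in the repo, which lies outside this hunk and is
# not reproduced here.
inputs = tokenizer(selfies_str, return_tensors="pt")
```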
modeling_m5_encoder.py CHANGED
@@ -36,6 +36,7 @@ class M5EncoderConfig(T5Config):
         relative_attention_max_distance=128,
         relative_attention_num_buckets=48,
         vocab_size=1032,
+        num_decoder_layers=0,
         **kwargs,
     ):
         super().__init__(d_ff=d_ff,
@@ -50,7 +51,7 @@ class M5EncoderConfig(T5Config):
                          relative_attention_max_distance=relative_attention_max_distance,
                          relative_attention_num_buckets=relative_attention_num_buckets,
                          vocab_size=vocab_size,
-                         num_decoder_layers=0,
+                         num_decoder_layers=num_decoder_layers,
                          **kwargs)
 
 class M5Encoder(PreTrainedModel):
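
As the two hunks read, the fix promotes `num_decoder_layers` from a value hardcoded inside the `super().__init__` call to a constructor argument with the same default of 0. Previously, any caller that supplied `num_decoder_layers` explicitly (including `from_pretrained`, which rehydrates saved config keys through `**kwargs`) would collide with the hardcoded keyword and raise a `TypeError`. A minimal sketch of the fixed behaviour, assuming the class is importable from the repo's `modeling_m5_encoder` module:

```python
from modeling_m5_encoder import M5EncoderConfig

# Default is unchanged: an encoder-only config with zero decoder layers.
cfg = M5EncoderConfig()
assert cfg.num_decoder_layers == 0

# After this commit the key can also be passed explicitly; before it,
# this call raised "got multiple values for keyword argument
# 'num_decoder_layers'" because the hardcoded value collided with the
# same key arriving via **kwargs.
cfg = M5EncoderConfig(num_decoder_layers=0)
```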