Joemgu committed
Commit
9177530
1 Parent(s): 9ca21ab

Update config.json

Files changed (1)
  1. config.json +0 -1
config.json CHANGED
@@ -28,7 +28,6 @@
   "relative_attention_max_distance": 128,
   "relative_attention_num_buckets": 32,
   "tie_word_embeddings": false,
-  "tie_weights": false,
   "torch_dtype": "float32",
   "transformers_version": "4.30.2",
   "use_cache": true,