Add attention_bias to make TGI work

#5
opened by philschmid (HF staff)
Files changed (1)
  1. config.json +1 -0
config.json CHANGED
@@ -165,5 +165,6 @@
165
  "torch_dtype": "bfloat16",
166
  "transformers_version": "4.39.3",
167
  "use_cache": true,
 
168
  "vocab_size": 32064
169
  }
 
165
  "torch_dtype": "bfloat16",
166
  "transformers_version": "4.39.3",
167
  "use_cache": true,
168
+ "attention_bias": false,
169
  "vocab_size": 32064
170
  }