Add attention_bias to make TGI work

#4
Opened by philschmid (HF staff)
Files changed (1)
  1. config.json +1 -0
config.json CHANGED
@@ -182,5 +182,6 @@
182
  "torch_dtype": "bfloat16",
183
  "transformers_version": "4.38.1",
184
  "use_cache": true,
 
185
  "vocab_size": 100352
186
  }
 
182
  "torch_dtype": "bfloat16",
183
  "transformers_version": "4.38.1",
184
  "use_cache": true,
185
+ "attention_bias": false,
186
  "vocab_size": 100352
187
  }