mayank-mishra committed on
Commit: 32e6e13
Parent: e1b7bf8

Update config.json

Files changed (1): config.json (+3, -0)
config.json CHANGED
@@ -1,4 +1,7 @@
 {
+  "architectures": [
+    "GraniteForCausalLM"
+  ],
   "attention_bias": true,
   "attention_dropout": 0.1,
   "attention_multiplier": 0.015625,