lll2343 committed on
Commit
668d325
·
verified ·
1 Parent(s): 04b5afe

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +2 -2
config.json CHANGED
@@ -7,10 +7,10 @@
7
  "AutoModelForCausalLM": "modeling_sdlm.SDLMQwen2ForCausalLM"
8
  },
9
  "attention_dropout": 0.0,
10
- "attn_implementation": "eager",
11
  "block_size": 8,
12
  "bos_token_id": 151643,
13
- "casual_attn": false,
14
  "eos_token_id": 151643,
15
  "hidden_act": "silu",
16
  "hidden_size": 2048,
 
7
  "AutoModelForCausalLM": "modeling_sdlm.SDLMQwen2ForCausalLM"
8
  },
9
  "attention_dropout": 0.0,
10
+ "attn_implementation": "sdpa",
11
  "block_size": 8,
12
  "bos_token_id": 151643,
13
+ "causal_attn": false,
14
  "eos_token_id": 151643,
15
  "hidden_act": "silu",
16
  "hidden_size": 2048,