Crystalcareai committed (verified)
Commit c880ac6 · Parent(s): b03ac5f

Update config.json

Files changed (1): config.json (+5, -1)
config.json CHANGED
@@ -5,6 +5,10 @@
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
+  "auto_map": {
+    "AutoModelForCausalLM": "Crystalcareai/Gemmooe--7b-modeling_gemma.GemmaForCausalLM",
+    "AutoTokenizer": "Crystalcareai/GemMoE-Beta-1--tokenization_gemmoe.GemmaTokenizer"
+  },
   "bos_token_id": 2,
   "eos_token_id": 1,
   "head_dim": 256,
@@ -25,4 +29,4 @@
   "transformers_version": "4.38.2",
   "use_cache": true,
   "vocab_size": 256000
-}
+}
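
The added "auto_map" block tells the transformers Auto classes to load custom modeling and tokenization code from the repos named in the mapping, rather than the built-in Gemma classes. A minimal sketch of how a checkpoint with such a config is typically loaded follows; the repo id is an assumption taken from the auto_map strings, not something stated elsewhere in this commit.

# Sketch (not part of the commit): loading a model whose config.json declares
# an "auto_map". trust_remote_code=True allows transformers to download and
# import the classes the mapping points to (modeling_gemma.GemmaForCausalLM,
# tokenization_gemmoe.GemmaTokenizer) instead of the stock implementations.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "Crystalcareai/GemMoE-Beta-1"  # assumed repo id, for illustration only

tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)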