OxxoCodes committed
Commit e726b82
1 Parent(s): b522dce

Update config.json


Fix config to allow loading with HF transformers

Files changed (1)
config.json +7 -2
config.json CHANGED
@@ -1,11 +1,16 @@
 {
-  "_name_or_path": "/scratch/nbrown9/jamba-small",
   "architectures": [
     "JambaForCausalLM"
   ],
   "attention_dropout": 0.0,
   "attn_layer_offset": 4,
   "attn_layer_period": 8,
+  "auto_map": {
+    "AutoConfig": "configuration_jamba.JambaConfig",
+    "AutoModel": "modeling_jamba.JambaModel",
+    "AutoModelForCausalLM": "modeling_jamba.JambaForCausalLM",
+    "AutoModelForSequenceClassification": "model.JambaForSequenceClassification"
+  },
   "bos_token_id": 1,
   "calc_logits_for_entire_prompt": false,
   "eos_token_id": 2,
@@ -38,6 +43,6 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.39.3",
   "use_cache": true,
-  "use_mamba_kernels": false,
+  "use_mamba_kernels": true,
   "vocab_size": 65536
 }
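
With the auto_map entries in place, the checkpoint should be loadable through the transformers Auto classes. A minimal sketch, assuming a repo id of OxxoCodes/jamba-small (the id is an assumption, not confirmed by this commit); trust_remote_code=True is needed because auto_map points at the configuration_jamba.py and modeling_jamba.py files shipped in the repo:

import torch
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

repo_id = "OxxoCodes/jamba-small"  # assumption: substitute the actual repo id

# auto_map routes the Auto classes to the custom Jamba code in the repo,
# so trust_remote_code=True is required when loading.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in config.json
)
tokenizer = AutoTokenizer.from_pretrained(repo_id)

# Note: with "use_mamba_kernels": true the modeling code is expected to use
# the fused mamba-ssm / causal-conv1d kernels, so those packages would need
# to be installed (an assumption based on the upstream Jamba implementation).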