Update tiny models for StableLmForCausalLM

#60
Files changed (3)
  1. config.json +3 -1
  2. generation_config.json +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -21,13 +21,15 @@
   "num_key_value_heads": 4,
   "pad_token_id": 0,
   "partial_rotary_factor": 0.25,
+  "qk_layernorm": false,
   "rope_scaling": null,
   "rope_theta": 10000,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
-  "transformers_version": "4.38.0.dev0",
+  "transformers_version": "4.40.0.dev0",
   "type_vocab_size": 16,
   "use_cache": true,
+  "use_parallel_residual": false,
   "use_qkv_bias": false,
   "vocab_size": 1024
 }
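The two added fields match the qk_layernorm and use_parallel_residual options that StableLmConfig exposes in the newer transformers version recorded above (4.40.0.dev0). As a minimal sketch, a tiny config like this one could be rebuilt in Python roughly as below; the hidden_size, intermediate_size, num_hidden_layers, and num_attention_heads values are illustrative assumptions, not values read from this checkpoint.

# Sketch only: rebuilding a tiny StableLM config with the fields from this diff.
# Width/depth values are assumed, not taken from the checkpoint.
from transformers import StableLmConfig, StableLmForCausalLM

config = StableLmConfig(
    vocab_size=1024,
    hidden_size=32,               # assumed tiny width
    intermediate_size=64,         # assumed
    num_hidden_layers=2,          # assumed
    num_attention_heads=4,        # assumed
    num_key_value_heads=4,
    partial_rotary_factor=0.25,
    rope_theta=10000,
    use_qkv_bias=False,
    qk_layernorm=False,           # field added in this update
    use_parallel_residual=False,  # field added in this update
    tie_word_embeddings=False,
    pad_token_id=0,
)
model = StableLmForCausalLM(config)  # needs a transformers release that knows the new fields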
generation_config.json CHANGED
@@ -3,5 +3,5 @@
   "bos_token_id": 0,
   "eos_token_id": 0,
   "pad_token_id": 0,
-  "transformers_version": "4.38.0.dev0"
+  "transformers_version": "4.40.0.dev0"
 }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:78231a194912c1d1e0f0f474ca66d847722c14e7461bcb63998f7a283f0eefda
+oid sha256:6a0476d4194b6aa6c20428d995ace3ed6e838579ebba752618d52afe28779a92
 size 717384
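The regenerated weights keep the same 717384-byte size but carry a new hash. A quick smoke-test sketch for this PR's revision is shown below; the repo id is an assumption based on the usual tiny-model naming convention and is not stated in this diff.

# Smoke test sketch; the repo id is an assumed name, not taken from this diff.
import torch
from transformers import AutoModelForCausalLM

repo_id = "hf-internal-testing/tiny-random-StableLmForCausalLM"  # assumed
model = AutoModelForCausalLM.from_pretrained(repo_id, revision="refs/pr/60")  # this PR's ref
input_ids = torch.randint(0, model.config.vocab_size, (1, 8))
out = model.generate(input_ids, max_new_tokens=4)
print(out.shape)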