Dampish committed
Commit b71a026
1 parent: 7234286

Upload 3 files

Files changed (3)
  1. config.json +46 -0
  2. generation_config.json +5 -0
  3. pytorch_model.bin +3 -0
config.json ADDED
@@ -0,0 +1,46 @@
+ {
+   "_name_or_path": "mosaicml/mpt-1b-redpajama-200b",
+   "alibi": true,
+   "alibi_bias_max": 8,
+   "architectures": [
+     "MosaicGPT"
+   ],
+   "attn_clip_qkv": null,
+   "attn_impl": "torch",
+   "attn_pdrop": 0,
+   "attn_qk_ln": true,
+   "attn_uses_sequence_id": false,
+   "auto_map": {
+     "AutoConfig": "configuration_mosaic_gpt.MosaicGPTConfig",
+     "AutoModelForCausalLM": "mosaic_gpt.MosaicGPT"
+   },
+   "d_model": 2048,
+   "emb_init_std": null,
+   "emb_init_uniform_lim": null,
+   "emb_pdrop": 0,
+   "embedding_fraction": 1.0,
+   "fan_mode": "fan_in",
+   "init_device": "cpu",
+   "init_div_is_residual": true,
+   "init_gain": 0,
+   "init_nonlinearity": "relu",
+   "init_std": 0.02,
+   "logit_scale": null,
+   "low_precision_layernorm": true,
+   "max_seq_len": 2048,
+   "mlp_ratio": 4,
+   "model_type": "mosaic_gpt",
+   "n_heads": 16,
+   "n_layers": 24,
+   "no_bias": true,
+   "param_init_fn": "kaiming_normal_",
+   "prefix_lm": false,
+   "resid_pdrop": 0,
+   "softmax_scale": null,
+   "tokenizer_name": "EleutherAI/gpt-neox-20b",
+   "torch_dtype": "float32",
+   "transformers_version": "4.27.4",
+   "use_cache": false,
+   "verbose": 0,
+   "vocab_size": 50432
+ }
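
Note: because config.json routes AutoConfig and AutoModelForCausalLM to the custom MosaicGPT classes via "auto_map", loading this checkpoint with transformers requires trust_remote_code=True, and the referenced configuration_mosaic_gpt.py / mosaic_gpt.py modules must also exist in the repository (they are not part of this commit). A minimal loading sketch, assuming a placeholder repository id and the tokenizer named in "tokenizer_name":

import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

repo_id = "Dampish/<repo-name>"  # placeholder: substitute the actual repository id

# Tokenizer follows config.json's "tokenizer_name"
tokenizer = AutoTokenizer.from_pretrained("EleutherAI/gpt-neox-20b")

# trust_remote_code=True is needed because "auto_map" points AutoConfig /
# AutoModelForCausalLM at the custom MosaicGPT code shipped with the repo.
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,
    torch_dtype=torch.float32,  # matches "torch_dtype" in config.json
)

inputs = tokenizer("The capital of France is", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=16)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))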
generation_config.json ADDED
@@ -0,0 +1,5 @@
+ {
+   "_from_model_config": true,
+   "transformers_version": "4.27.4",
+   "use_cache": false
+ }
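
Since generation_config.json was derived from the model config ("_from_model_config": true), it can also be read back on its own; a small sketch, reusing the placeholder repo id from above:

from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("Dampish/<repo-name>")  # placeholder repo id
print(gen_config.use_cache)  # False, as set in generation_config.json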
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0f195ac04c4300f0c0cf51f97d1e77580353699d0f56285072e38f555dbd68c1
+ size 5245834073
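
pytorch_model.bin is stored via Git LFS, so the file committed here is only a pointer; the actual ~5.2 GB weights are addressed by the SHA-256 oid and size above. A small sketch for verifying a downloaded copy against the pointer (the local path is an assumption):

import hashlib
import os

path = "pytorch_model.bin"  # assumed local path to the downloaded weights

# Hash the file in chunks and compare against the pointer's oid and size.
sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert os.path.getsize(path) == 5245834073, "size mismatch"
assert sha.hexdigest() == "0f195ac04c4300f0c0cf51f97d1e77580353699d0f56285072e38f555dbd68c1", "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer")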