abhinavkulkarni committed
Commit d63f29f
1 Parent(s): 80a3023

Upload config

Files changed (1)
  1. config.json +3 -3
config.json CHANGED
@@ -6,7 +6,7 @@
   "attn_config": {
     "alibi": true,
     "alibi_bias_max": 8,
-    "attn_impl": "triton",
+    "attn_impl": "torch",
     "attn_pdrop": 0,
     "attn_type": "multihead_attention",
     "attn_uses_sequence_id": false,
@@ -45,8 +45,8 @@
   "norm_type": "low_precision_layernorm",
   "resid_pdrop": 0,
   "tokenizer_name": "sam-mosaic/gpt-neox-20b-chatml",
-  "torch_dtype": "float16",
-  "transformers_version": "4.30.0.dev0",
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.33.1",
   "use_cache": false,
   "verbose": 0,
   "vocab_size": 50432
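
For reference, a minimal sketch of how a downstream user might load a checkpoint carrying this updated config. The repo id below is a placeholder (this commit only shows config.json, not the repository name), and the attribute access assumes an MPT-style remote-code config, which stores `attn_config` as a dict; after this commit the config requests the pure-PyTorch attention path instead of Triton kernels, and bfloat16 instead of float16 weights.

```python
import torch
from transformers import AutoConfig, AutoModelForCausalLM

# Placeholder repo id: the actual repository is not shown in this commit.
model_id = "abhinavkulkarni/<mpt-model>"

config = AutoConfig.from_pretrained(model_id, trust_remote_code=True)
# After this commit, no Triton installation is needed at load time.
assert config.attn_config["attn_impl"] == "torch"

model = AutoModelForCausalLM.from_pretrained(
    model_id,
    config=config,
    torch_dtype=torch.bfloat16,  # matches the updated "torch_dtype" field
    trust_remote_code=True,
)
```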