whizzzzkid committed
Commit d49614a
Parent: 312d44f

Upload StableLmForCausalLM

Files changed (2):
  1. config.json +2 -2
  2. model.safetensors +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "axalotl_ft",
+  "_name_or_path": "model_dir_0122",
   "architectures": [
     "StableLmForCausalLM"
   ],
@@ -23,7 +23,7 @@
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.38.2",
-  "use_cache": true,
+  "use_cache": false,
   "use_qkv_bias": true,
   "vocab_size": 100352
 }
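The only functional change here is flipping use_cache from true to false, so the model skips the key/value cache by default during generation. A minimal sketch of how a downstream user might load this checkpoint and re-enable the cache per call; the repo id below is a placeholder, not taken from this commit:

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hypothetical repo id -- substitute the actual model repository.
model_id = "whizzzzkid/stablelm-model"

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in config.json
)

inputs = tokenizer("Hello, world", return_tensors="pt")
# use_cache=True overrides the config default ("use_cache": false) for this
# call, restoring the usual KV-cache speedup in autoregressive decoding.
output = model.generate(**inputs, max_new_tokens=32, use_cache=True)
print(tokenizer.decode(output[0], skip_special_tokens=True))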
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2d67caa172654ea3fb257cec537de3c77b34acb1685fabf01a41213883305087
+oid sha256:5d57bedc024941a4f9f3dd88aa35adef4d61531a0c983aff598416cec3c936dc
 size 3289069520
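Since model.safetensors is tracked with Git LFS, the file in the repo is only a pointer; the new oid records the sha256 of the updated 3,289,069,520-byte weight blob. A quick sketch for verifying a downloaded copy against the pointer's hash:

import hashlib

# sha256 from the updated LFS pointer above.
EXPECTED = "5d57bedc024941a4f9f3dd88aa35adef4d61531a0c983aff598416cec3c936dc"

digest = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        digest.update(chunk)

assert digest.hexdigest() == EXPECTED, "model.safetensors does not match the LFS pointer"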