at676 committed on
Commit: 534fc2d
Parent: a22ca7a

Upload folder using huggingface_hub (#2)


- d602501ed7163da3a0eaf56703287e38ae750b4f034c53da829eee30251a47c7 (9ad3ba8e265ca18301fc1a53c22ebbdd0aaa4614)
- 5df482664c6ec2536f3a75f62ab9220aa9c183309b552eb5151c02ce3804c215 (1cb1445f9a82cb144f889cf3f886670c1a541071)
- d059b4e64110966611418c19da57544a0ea8ae7ff3f8ba6964186baa9484c8e0 (01772722793d36fee4ff996773aa4d1c0d3e1066)
- 7b99b87e6fc62d8277ea3c244e8ebaf37681243bcf9e4e9a1e9787893c090c07 (d454c93be47de0c27864e731a0d74ec7ab719891)
- 788f634db4fc2ab65a524e9170dde4c759a6a5c5f18e5771496c1ae50b1d2ef2 (633d86a9b14fc357c909c1dcc5c04b5dc483f46b)
- 05d770b2c1dea2b91a9fcbb5be5b87a5b0acf2daa2595f707b83b19e4fa69b1d (fcce86e54f05b49361cb7bb255a3e22ba273bb93)

config.json CHANGED
@@ -4,6 +4,7 @@
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
+  "attention_dropout": 0.0,
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
@@ -34,7 +35,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.34.0",
+  "transformers_version": "4.36.2",
   "use_cache": true,
   "vocab_size": 32000
 }
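
The two config changes above (the new "attention_dropout" field and the transformers version bump) can be checked straight from the hub. A minimal sketch, assuming huggingface_hub is installed; the repo id below is a placeholder, not taken from this commit:

# Sketch only: "your-org/your-model" is a placeholder repo id.
import json
from huggingface_hub import hf_hub_download

path = hf_hub_download("your-org/your-model", "config.json", revision="534fc2d")
with open(path) as f:
    cfg = json.load(f)
print(cfg["attention_dropout"])     # 0.0, the field added in this commit
print(cfg["transformers_version"])  # "4.36.2" after the version bump
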
generation_config.json CHANGED
@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 1,
   "eos_token_id": 2,
-  "transformers_version": "4.34.0"
+  "transformers_version": "4.36.2"
 }
model-00001-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5734f4e0daf4c8de27b82d5d0e74b7c8882b2436a708d469e5cb9005407a0ddf
+size 4879110326
model-00002-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bd085de775a5dc975cadb0b89d6deedf2fbd889707d656264cecacdc43c0b0b7
+size 4997153846
model-00003-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9fa334c025c836f7b72b2da6f2dbed78082392acb4b6813e3a8982fe65d045c8
+size 4929509530
model-00004-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6cb941cd21b948cc0edd825844f045962d9645e7bb6321d2624211a3cd56624a
+size 4962545266
model-00005-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ce5f2254c2cb4617f46ef0df46411161799d71adaaea504f9338b3b7eab05282
+size 4997153846
model-00006-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:79682dc66162b566ec1fe55a625949f44bc3ce8ef82d819fe8961644c901348d
+size 592031698
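
Each of the six shard files above is committed as a Git LFS pointer: the repository stores only the version, oid, and size lines, while the tensor data itself lives in LFS storage. As a hedged sketch (not part of this commit), a downloaded shard can be checked against the sha256 recorded in its pointer, assuming the file sits in the current directory:

# Sketch: verify a downloaded shard against the oid from its LFS pointer.
import hashlib

def sha256_of(path, chunk_size=1 << 20):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Expected value taken from the pointer for shard 1 of 6 above.
expected = "5734f4e0daf4c8de27b82d5d0e74b7c8882b2436a708d469e5cb9005407a0ddf"
assert sha256_of("model-00001-of-00006.safetensors") == expected
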
model.safetensors.index.json CHANGED
The diff for this file is too large to render.
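
model.safetensors.index.json maps each weight name to the shard that contains it, which is how from_pretrained resolves the six files added above. A minimal sketch of inspecting it, assuming the index has been downloaded next to the shards (the example key in the comment is illustrative, not copied from this commit):

# Sketch: inspect the weight-to-shard mapping in the sharded checkpoint index.
import json

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])  # total bytes across all shards
# weight_map entries look like:
#   "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00006.safetensors"
shards = sorted(set(index["weight_map"].values()))
print(len(shards))                      # 6 shard files in this checkpoint
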