mrm8488 committed on
Commit
1a09c9b
1 Parent(s): 15c0e73

Upload MistralForCausalLM

Browse files
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "mistralai/Mistral-7B-v0.1",
3
  "architectures": [
4
  "MistralForCausalLM"
5
  ],
@@ -19,7 +19,7 @@
19
  "sliding_window": 4096,
20
  "tie_word_embeddings": false,
21
  "torch_dtype": "bfloat16",
22
- "transformers_version": "4.35.0.dev0",
23
  "use_cache": true,
24
- "vocab_size": 32000
25
  }
 
1
  {
2
+ "_name_or_path": "mrm8488/Mistral-7B-v0.1-inst",
3
  "architectures": [
4
  "MistralForCausalLM"
5
  ],
 
19
  "sliding_window": 4096,
20
  "tie_word_embeddings": false,
21
  "torch_dtype": "bfloat16",
22
+ "transformers_version": "4.36.0.dev0",
23
  "use_cache": true,
24
+ "vocab_size": 32002
25
  }
generation_config.json CHANGED
@@ -2,5 +2,5 @@
2
  "_from_model_config": true,
3
  "bos_token_id": 1,
4
  "eos_token_id": 2,
5
- "transformers_version": "4.35.0.dev0"
6
  }
 
2
  "_from_model_config": true,
3
  "bos_token_id": 1,
4
  "eos_token_id": 2,
5
+ "transformers_version": "4.36.0.dev0"
6
  }
model-00001-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:4c5eb04d63d9e124eacd61e6bae8b732b8055767ddac189658b5b16a4fc9ca33
3
- size 4943162336
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0d64a3b12c0ce97e3d4f7a9f5475ee72ec101fa2db89d77ec031d6d2bb23429c
3
+ size 4943178720
model-00002-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:688c82274ae2c96563095e31df37baefddc7a9765c72d26afa4f34c5d7aeafae
3
  size 4999819336
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1bce6869aa32fff13846bb3ff43789887dfcf50b76bdac56a74cae4543a714e1
3
  size 4999819336
model-00003-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e61649490f293d7d7311f82da81aefc706deee41d35e80116f815eae14b6a80b
3
- size 4540516344
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e5d4390258f4c5230f290380291f8a82ffba240d49a7a189b95d022e5dd48903
3
+ size 4540532728
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "metadata": {
3
- "total_size": 14483464192
4
  },
5
  "weight_map": {
6
  "lm_head.weight": "model-00003-of-00003.safetensors",
 
1
  {
2
  "metadata": {
3
+ "total_size": 14483496960
4
  },
5
  "weight_map": {
6
  "lm_head.weight": "model-00003-of-00003.safetensors",