angelahzyuan committed
Commit 625d98c
1 Parent(s): 1ea9434

Upload MistralForCausalLM

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "",
+  "_name_or_path": "UCLA-AGI/Mistral7B-PairRM-SPPO",
   "architectures": [
     "MistralForCausalLM"
   ],
@@ -19,7 +19,7 @@
   "rope_theta": 1000000.0,
   "sliding_window": null,
   "tie_word_embeddings": false,
-  "torch_dtype": "float32",
+  "torch_dtype": "bfloat16",
   "transformers_version": "4.36.2",
   "use_cache": false,
   "vocab_size": 32000
model-00001-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e7802c7f7e878b826fc77099000c882433b2b326a39bcbab8cc1b3ea217fcabc
-size 9886313368
+oid sha256:38e95d7b42bf61f58aee9fa915f4440265b53c15af1cbcb5cdcd38fb8e00ad00
+size 4943162336
model-00002-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:90deee379e900f487bf9577842ec99e0373c0277aca3a0da72ecc1007b97c7f9
-size 9999626240
+oid sha256:36d2918d5809b64b255c3b9f4a0f462d155deed27ee2b03d24a86fdf850607df
+size 4999819336
model-00003-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:087eee646587f3c8a033bafc49194ddb874c57cf3e8f9eb8d7f5c94ada66bdb9
-size 9081022384
+oid sha256:d73e22f7e4e945b7d0a9af6473c01533d9185e5e87b144a5c539bc2fc663a213
+size 4540516344
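Each shard diff above changes only the Git LFS pointer (oid and size), not inline weight data; the new oids identify the rewritten bfloat16 shards. A minimal sketch, assuming a locally downloaded shard, for checking it against its pointer:

```python
# Minimal sketch: verify a downloaded shard against its LFS pointer above.
# The path is assumed to be a local copy of the file from this commit.
import hashlib

def sha256_of(path: str, chunk: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk):
            h.update(block)
    return h.hexdigest()

expected = "38e95d7b42bf61f58aee9fa915f4440265b53c15af1cbcb5cdcd38fb8e00ad00"
assert sha256_of("model-00001-of-00003.safetensors") == expected
```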
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
 {
   "metadata": {
-    "total_size": 28966928384
+    "total_size": 14483464192
   },
   "weight_map": {
     "lm_head.weight": "model-00003-of-00003.safetensors",