at676 committed on
Commit ff989d8
1 Parent(s): 121bbdd

Upload folder using huggingface_hub (#2)


- 76fd006010c5742243be21940bfd4c1cf1e7c99ac9fe08a6ced9f4877a3f9669 (bb922e120dbff7c6aaaac158e8c63d4296174472)
- 79c3be7d7e4888d8b492b3474c39cb32523ca076a8c5e9556170907182f99ee2 (32120dbfd32ce4dd16836022a659e9563cd29d35)
- 72651ae3768767be9580c40a1b8e51aa941ab413f78a23ea3c998e75a6e54507 (faacf93e5e68123b5b49e4189760d18a1d36537f)
- bf3d968935eb6f3540e19511aa8bdea896c734967d8e8e783078260c190394c8 (02b9e2b320ce0518f759af3c2af3aed3ee96ac03)
- 263044c915cf26ebb2b57cc1614c2665dcb2fa3fd146109f2112036fe9dc01c8 (5ad8d846449dda1732963706aead1e8bb8e8b0b2)
- 4a64b3e83cede3c1173caf9f0529ba2f2df5dba1007a8be5e9077896e29253b8 (db22aafa902bdb6381a2741fc017a2f59f190e31)
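
The commit message indicates these files were pushed with huggingface_hub's upload_folder API. A minimal sketch of such an upload, assuming a hypothetical local folder path and repo id (neither is shown in this commit):

```python
from huggingface_hub import HfApi

api = HfApi()

# Push every file in a local folder to a model repo on the Hub in a
# single commit. "local/model-dir" and "your-org/your-model" are
# placeholders; the actual paths are not visible in this diff.
api.upload_folder(
    folder_path="local/model-dir",
    repo_id="your-org/your-model",
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```

Large binaries such as the .safetensors shards below are tracked through Git LFS, which is why the diff shows pointer files rather than raw weights.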

config.json CHANGED
@@ -4,6 +4,7 @@
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
+  "attention_dropout": 0.0,
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
@@ -34,7 +35,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.34.0",
+  "transformers_version": "4.36.2",
   "use_cache": true,
   "vocab_size": 32000
 }
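
The config change adds an "attention_dropout" field, which newer transformers releases expect on Llama configs, and records the library version that wrote the file (4.36.2 instead of 4.34.0). A sketch of reading these fields back, assuming a placeholder repo id:

```python
from transformers import AutoConfig

# "your-org/your-model" is a placeholder; the repo id is not shown here.
config = AutoConfig.from_pretrained("your-org/your-model")

print(config.attention_dropout)  # 0.0, the field added in this commit
print(config.torch_dtype)        # float16
print(config.vocab_size)         # 32000
```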
generation_config.json CHANGED
@@ -6,5 +6,5 @@
   "pad_token_id": 0,
   "temperature": 0.6,
   "top_p": 0.9,
-  "transformers_version": "4.34.0"
+  "transformers_version": "4.36.2"
 }
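
Here only the recorded transformers_version changes; the sampling defaults (temperature 0.6, top_p 0.9, pad_token_id 0) are untouched. They can be inspected with transformers' GenerationConfig, again using a placeholder repo id:

```python
from transformers import GenerationConfig

# Placeholder repo id; not shown in this diff.
gen_config = GenerationConfig.from_pretrained("your-org/your-model")

print(gen_config.temperature)   # 0.6
print(gen_config.top_p)         # 0.9
print(gen_config.pad_token_id)  # 0
```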
model-00001-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:497740a8d6d9fe102ee712f699c8a3e3c6a87bebdcd65b9b053c9a79c3c7bd9f
+size 4932538570
model-00002-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6056a5a5bbeb4e4a7f6857f6795de59cfdc2986a1d5b6a85e254cb610379f2de
+size 4962494194
model-00003-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b3cc60f476e16979c0ce4a646a4cc546105da6526f0e233ab2794e118cf701fe
+size 4993905052
model-00004-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aeb0b4a4cb6d932a6c0918ac2831d0bab7dfcff4d96b725cb2406d76f8b41183
+size 4962494194
model-00005-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bc7b29a1078f1c8ccb8d5ed04de608b01e8bb0724419bb96107c5f145e92346e
+size 4993905052
model-00006-of-00006.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8d0751513f7f1dd696c1b0883dfc85bfff879f74c3c7f21b5aed959a6ffed15f
+size 1897188890
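
Each ADDED file above is a Git LFS pointer, not the weights themselves: it records the SHA-256 digest (oid) and byte size of the real shard, which is stored out of band. A minimal sketch for verifying a downloaded shard against its pointer, assuming the shard is available locally:

```python
import hashlib

def verify_lfs_pointer(path: str, expected_oid: str, expected_size: int) -> bool:
    """Check a downloaded file against the oid/size from its LFS pointer."""
    sha = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        # Stream in 1 MiB chunks so multi-GB shards need not fit in memory.
        for chunk in iter(lambda: f.read(1 << 20), b""):
            sha.update(chunk)
            size += len(chunk)
    return sha.hexdigest() == expected_oid and size == expected_size

# oid and size taken from the first pointer above; the local path is assumed.
print(verify_lfs_pointer(
    "model-00001-of-00006.safetensors",
    "497740a8d6d9fe102ee712f699c8a3e3c6a87bebdcd65b9b053c9a79c3c7bd9f",
    4932538570,
))
```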
model.safetensors.index.json CHANGED
The diff for this file is too large to render.
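
The six shard sizes above sum to 26,742,525,952 bytes (~26.7 GB), consistent with a 13B-parameter model stored in float16. The index file maps each tensor name to the shard that holds it; a sketch of inspecting it locally (the example weight name is illustrative, since the actual diff is not rendered):

```python
import json

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])  # total bytes across all shards
print(len(index["weight_map"]))         # number of tensors in the model
# Illustrative lookup: which shard holds a given tensor, e.g.
# index["weight_map"]["model.embed_tokens.weight"]
#   -> "model-00001-of-00006.safetensors"
```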