LiChenYi committed on
Commit 38a440f
1 Parent(s): 7d2b9ed

Merge the model produced by continued pretraining with llama-2-7b-hf, then quantize.

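The merge step itself is not part of this commit; a minimal sketch of how it could be done, assuming the continued pretraining produced a PEFT/LoRA adapter (the adapter path and output directory below are hypothetical placeholders, not paths from this repo). Re-saving the merged model this way is consistent with the two pytorch_model shards whose hashes change in this commit.

# Sketch: fold a continued-pretraining LoRA adapter into llama-2-7b-hf.
# Assumes PEFT was used for the pretraining stage; adapter and output
# paths are hypothetical.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_model = AutoModelForCausalLM.from_pretrained(
    "meta-llama/Llama-2-7b-hf", torch_dtype=torch.float16
)
model = PeftModel.from_pretrained(base_model, "path/to/pretrain-adapter")
merged = model.merge_and_unload()  # merge LoRA weights into the base model
merged.save_pretrained("merged-llama-2-7b", max_shard_size="10GB")

tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf")
tokenizer.save_pretrained("merged-llama-2-7b")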
.gitattributes CHANGED
@@ -35,3 +35,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
 pytorch_model-00001-of-00002.bin filter=lfs diff=lfs merge=lfs -text
 pytorch_model-00002-of-00002.bin filter=lfs diff=lfs merge=lfs -text
+ggml-model-f16.gguf filter=lfs diff=lfs merge=lfs -text
+ggml-model-q6_K.bin filter=lfs diff=lfs merge=lfs -text
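The two new .gitattributes entries are standard Git LFS tracking rules, so only LFS pointer files (version/oid/size) are committed instead of the binaries. A sketch of how such rules are typically generated with the git-lfs CLI (not necessarily how this commit was prepared):

# Sketch: register the new model artifacts with Git LFS.
import subprocess

for name in ["ggml-model-f16.gguf", "ggml-model-q6_K.bin"]:
    # appends "<name> filter=lfs diff=lfs merge=lfs -text" to .gitattributes
    subprocess.run(["git", "lfs", "track", name], check=True)

subprocess.run(
    ["git", "add", ".gitattributes", "ggml-model-f16.gguf", "ggml-model-q6_K.bin"],
    check=True,
)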
ggml-model-f16.gguf ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4578d0c031bf079018b1e6571dc78ebef7a5f8415a58692869a965c8c359fc60
+size 13860294144
ggml-model-q6_K.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f5c973ebd175308285f216ff7202dd1c3b7f985faf32904f69e3ebc0d73585db
+size 5686251040
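The f16 GGUF (~13.9 GB) and the q6_K file (~5.7 GB) look like the output of the llama.cpp conversion and quantization tools; the following is a sketch under that assumption. The merged-model directory and the llama.cpp checkout path are placeholders, and the exact llama.cpp revision used is not recorded in this commit.

# Sketch: HF checkpoint -> f16 GGUF -> 6-bit K-quant, via llama.cpp tooling.
import subprocess

LLAMA_CPP = "path/to/llama.cpp"  # hypothetical checkout location

# Convert the merged HF model to ggml-model-f16.gguf
subprocess.run(
    ["python", f"{LLAMA_CPP}/convert.py", "merged-llama-2-7b",
     "--outtype", "f16", "--outfile", "ggml-model-f16.gguf"],
    check=True,
)

# Quantize the f16 GGUF to q6_K
subprocess.run(
    [f"{LLAMA_CPP}/quantize", "ggml-model-f16.gguf",
     "ggml-model-q6_K.bin", "q6_K"],
    check=True,
)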
pytorch_model-00001-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5d5fdd282e77dc26aa62a80cdf7ba8d5b2c4ddc280cef5b07b60c4b8d3cba6d3
+oid sha256:7ab01965ad9b9233852bc8e5cdf125d21e70ce1edc00100c1d564e4054e753f9
 size 10167475414
pytorch_model-00002-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ca7578414c186e0b8b39ef9805362c1f402ac9a9910a7213b0ac042fbb7f44e9
+oid sha256:956b0e223bb776d555b2adc493a7379077ad9c304d764a28c6b9be16435d4006
 size 3691156371
special_tokens_map.json CHANGED
@@ -13,7 +13,6 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": "<pad>",
   "unk_token": {
     "content": "<unk>",
     "lstrip": false,