zhaode committed on
Commit
d1475b0
·
verified ·
1 Parent(s): da628fa

Upload folder using huggingface_hub

Browse files
Files changed (5) hide show
  1. config.json +8 -0
  2. llm.mnn +3 -0
  3. llm.mnn.json +3 -0
  4. llm.mnn.weight +3 -0
  5. llm_config.json +14 -0
config.json ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "llm_model": "llm.mnn",
3
+ "llm_weight": "llm.mnn.weight",
4
+ "backend_type": "cpu",
5
+ "thread_num": 4,
6
+ "precision": "low",
7
+ "memory": "low"
8
+ }
llm.mnn ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:81db0cd203f3fc537d53111810e275df5ec2fc93c06a07ebe9a472a6799aeae2
3
+ size 2636368
llm.mnn.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6b26bcd7f4c6b7b2dfa755ef975b02e0afff3bb8f0d3ef6e4ccbd090c9bce908
3
+ size 16736537
llm.mnn.weight ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:749e2efb3133b0f79b46ba049ee3ba5612374c31689a447ee867700aa50b82b4
3
+ size 3716485974
llm_config.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "hidden_size": 4096,
3
+ "layer_nums": 32,
4
+ "attention_mask": "float",
5
+ "key_value_shape": [
6
+ 2,
7
+ 1,
8
+ 0,
9
+ 32,
10
+ 128
11
+ ],
12
+ "prompt_template": "[INST]%s[/INST]",
13
+ "is_visual": false
14
+ }