Init cpm-bee-5b
- config.json +15 -0
- pytorch_model.bin +3 -0
- vocab.txt +0 -0
config.json
ADDED
@@ -0,0 +1,15 @@
+{
+    "vocab_size": 86583,
+    "dim_model": 4096,
+    "dim_ff" : 10240,
+    "num_layers" : 48,
+    "num_heads": 32,
+    "dim_head" : 128,
+    "dropout_p" : 0.0,
+    "position_bias_num_buckets" : 256,
+    "position_bias_num_segment_buckets": 256,
+    "position_bias_max_distance" : 2048,
+    "eps" : 1e-6,
+    "half" : true,
+    "mask_modules": [[false, false], [true, false], [false, false], [true, false], [true, true], [true, false], [true, true], [true, true], [false, false], [false, false], [true, true], [true, false], [true, false], [true, true], [false, false], [true, true], [false, false], [false, true], [true, false], [true, true], [false, false], [false, true], [true, true], [true, true], [false, false], [true, true], [false, false], [true, true], [true, true], [false, false], [true, true], [false, false], [true, true], [false, false], [true, true], [true, false], [true, true], [true, true], [true, true], [false, false], [true, true], [false, false], [true, true], [true, true], [false, false], [true, true], [false, false], [false, false]]
+}
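The config above fixes the transformer shape of cpm-bee-5b: 48 layers, a 4096-dimensional hidden state split across 32 heads of 128 dimensions each, and one flag pair per layer in mask_modules. A minimal sanity-check sketch in Python; the local file path and the reading of each pair as [attention, feed-forward] masks are assumptions, not something stated in this commit:

```python
import json

# Sketch: load the config added in this commit and check its internal
# consistency. The repo-local path "config.json" is an assumption.
with open("config.json") as f:
    cfg = json.load(f)

# The hidden size should factor into attention heads: 4096 == 32 * 128.
assert cfg["dim_model"] == cfg["num_heads"] * cfg["dim_head"]

# mask_modules carries one flag pair per transformer layer (48 pairs here);
# interpreting each pair as [mask_attention, mask_ffn] is an assumption.
assert len(cfg["mask_modules"]) == cfg["num_layers"]
assert all(len(pair) == 2 for pair in cfg["mask_modules"])

print("config OK:", cfg["num_layers"], "layers,", cfg["dim_model"], "hidden dim")
```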
pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a607d55060b0ee08736cfcc29e9d8d5311975b183525ab34ce3afa039ab534dd
+size 9299706809
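pytorch_model.bin is tracked with Git LFS, so the diff shows only the pointer file: the spec version, the SHA-256 digest of the actual payload, and its size in bytes (roughly 9.3 GB). A short sketch of verifying a downloaded copy against this pointer; the local path is an assumption:

```python
import hashlib
import os

# Sketch: check a locally downloaded pytorch_model.bin against the
# Git LFS pointer above. The path "pytorch_model.bin" is an assumption.
path = "pytorch_model.bin"

# The pointer's "size" field is the byte length of the real weights file.
assert os.path.getsize(path) == 9299706809

# The "oid" field is the SHA-256 digest of the file contents.
sha256 = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)
assert sha256.hexdigest() == (
    "a607d55060b0ee08736cfcc29e9d8d5311975b183525ab34ce3afa039ab534dd"
)
print("pytorch_model.bin matches the LFS pointer")
```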
vocab.txt
ADDED
The diff for this file is too large to render.