Jack817 committed
Commit 3a0a8b0
1 Parent(s): 15aaf07
Files changed (3)
  1. config.json +26 -0
  2. pytorch_model.bin +3 -0
  3. resolve/main/config.json +26 -0
config.json ADDED
@@ -0,0 +1,26 @@
+ {
+   "attn_pdrop": 0.1,
+   "embd_pdrop": 0.1,
+   "finetuning_task": null,
+   "initializer_range": 0.02,
+   "layer_norm_epsilon": 1e-05,
+   "n_ctx": 300,
+   "n_embd": 768,
+   "n_head": 12,
+   "n_layer": 10,
+   "n_positions": 300,
+   "num_labels": 1,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "output_past": true,
+   "pruned_heads": {},
+   "resid_pdrop": 0.1,
+   "summary_activation": null,
+   "summary_first_dropout": 0.1,
+   "summary_proj_to_labels": true,
+   "summary_type": "cls_index",
+   "summary_use_proj": true,
+   "torchscript": false,
+   "use_bfloat16": false,
+   "vocab_size": 13317
+ }
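
The keys above match transformers' GPT2Config field-for-field (attn_pdrop, n_ctx, n_embd, n_head, n_layer, resid_pdrop), so this appears to be a GPT-2-style checkpoint with 10 layers, 768-wide embeddings, a 300-token context window, and a 13,317-entry vocabulary. A minimal loading sketch, assuming the two committed files sit in the working directory (the repo id itself is not shown in this commit):

import torch
from transformers import GPT2Config, GPT2LMHeadModel

# Build the config from the JSON added in this commit.
config = GPT2Config.from_json_file("config.json")

# Instantiate a GPT-2 LM head model and load the committed weights.
model = GPT2LMHeadModel(config)
state_dict = torch.load("pytorch_model.bin", map_location="cpu")
# strict=False: whether the checkpoint ties/names the LM head the same
# way is an assumption, not something this commit confirms.
model.load_state_dict(state_dict, strict=False)
model.eval()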
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4a9ce96eaea90efb66fb0e61a5d459ce96ebbee31609cb9f6aba30c4f877749c
+ size 328981348
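
pytorch_model.bin is stored through Git LFS, so the commit contains only this three-line pointer; the actual weights are content-addressed by the sha256 oid. The 328,981,348-byte size is roughly consistent with the config above at float32: 13317×768 token embeddings plus 300×768 position embeddings plus 10 transformer blocks come to about 81.3M parameters, around 325 MB before serialization overhead. A small sketch for checking a downloaded copy against the pointer's oid:

import hashlib

def sha256_of(path, chunk_size=1 << 20):
    # Stream the file so large checkpoints don't need to fit in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        while block := f.read(chunk_size):
            digest.update(block)
    return digest.hexdigest()

expected = "4a9ce96eaea90efb66fb0e61a5d459ce96ebbee31609cb9f6aba30c4f877749c"
assert sha256_of("pytorch_model.bin") == expected, "LFS oid mismatch"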
resolve/main/config.json ADDED
@@ -0,0 +1,26 @@
+ {
+   "attn_pdrop": 0.1,
+   "embd_pdrop": 0.1,
+   "finetuning_task": null,
+   "initializer_range": 0.02,
+   "layer_norm_epsilon": 1e-05,
+   "n_ctx": 300,
+   "n_embd": 768,
+   "n_head": 12,
+   "n_layer": 10,
+   "n_positions": 300,
+   "num_labels": 1,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "output_past": true,
+   "pruned_heads": {},
+   "resid_pdrop": 0.1,
+   "summary_activation": null,
+   "summary_first_dropout": 0.1,
+   "summary_proj_to_labels": true,
+   "summary_type": "cls_index",
+   "summary_use_proj": true,
+   "torchscript": false,
+   "use_bfloat16": false,
+   "vocab_size": 13317
+ }