1littlecoder committed on
Commit 71c6df3
1 Parent(s): a39a06d

Upload 2 files

demo-leaderboard/gpt2-demo/results_2023-11-21T18-10-08.json ADDED
@@ -0,0 +1,15 @@
+{
+  "config": {
+    "model_dtype": "torch.float16",
+    "model_name": "demo-leaderboard/gpt2-demo",
+    "model_sha": "ac3299b02780836378b9e1e68c6eead546e89f90"
+  },
+  "results": {
+    "anli_r1": {
+      "acc": 0
+    },
+    "logiqa": {
+      "acc_norm": 0.90
+    }
+  }
+}
demo-leaderboard/gpt2-demo/results_2023-11-22 15:46:20.425378.json ADDED
@@ -0,0 +1,33 @@
+{
+  "results": {
+    "anli_r1": {
+      "acc": 0.4,
+      "acc_stderr": 0.11239029738980327
+    },
+    "logiqa": {
+      "acc": 0.35,
+      "acc_stderr": 0.10942433098048308,
+      "acc_norm": 0.3,
+      "acc_norm_stderr": 0.10513149660756933
+    }
+  },
+  "versions": {
+    "anli_r1": 0,
+    "logiqa": 0
+  },
+  "config": {
+    "model": "hf-causal-experimental",
+    "model_args": "pretrained=demo-leaderboard/gpt2-demo,revision=main,dtype=bfloat16",
+    "num_fewshot": 0,
+    "batch_size": 1,
+    "batch_sizes": [],
+    "device": "cpu",
+    "no_cache": true,
+    "limit": 20,
+    "bootstrap_iters": 100000,
+    "description_dict": null,
+    "model_dtype": "bfloat16",
+    "model_name": "demo-leaderboard/gpt2-demo",
+    "model_sha": "main"
+  }
+}
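For context, here is a minimal Python sketch (not part of the commit) of how result files with the structure shown above could be read and flattened into rows. The directory path and the assumption that metrics live under `results.<task>.<metric>` are taken from the two JSON payloads in this diff, not from any leaderboard backend code.

```python
import json
from pathlib import Path

def load_results(results_dir: str):
    """Yield (model_name, task, metric, value) tuples from every results_*.json file."""
    for path in sorted(Path(results_dir).glob("results_*.json")):
        data = json.loads(path.read_text())
        # model_name is taken from the "config" block when present, as in both files above.
        model_name = data.get("config", {}).get("model_name", path.parent.name)
        for task, metrics in data.get("results", {}).items():
            for metric, value in metrics.items():
                yield model_name, task, metric, value

if __name__ == "__main__":
    # Assumes the directory layout used in this commit (demo-leaderboard/gpt2-demo/).
    for model, task, metric, value in load_results("demo-leaderboard/gpt2-demo"):
        print(f"{model:35s} {task:10s} {metric:16s} {value}")
```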