{
  "dataset_revision": "70970daeab8776df92f5ea462b6173c0b46fd2d1",
  "mteb_dataset_name": "TwitterSemEval2015",
  "mteb_version": "1.1.0",
  "test": {
    "cos_sim": {
      "accuracy": 0.841151576563152,
      "accuracy_threshold": 0.760004997253418,
      "ap": 0.6785804277968714,
      "f1": 0.6408006919560113,
      "f1_threshold": 0.7039602994918823,
      "precision": 0.602602835231234,
      "recall": 0.6841688654353562
    },
    "dot": {
      "accuracy": 0.841151576563152,
      "accuracy_threshold": 0.7600049376487732,
      "ap": 0.6785805581908406,
      "f1": 0.6408006919560113,
      "f1_threshold": 0.7039604187011719,
      "precision": 0.602602835231234,
      "recall": 0.6841688654353562
    },
    "euclidean": {
      "accuracy": 0.841151576563152,
      "accuracy_threshold": 0.6928131580352783,
      "ap": 0.678580330833871,
      "f1": 0.6408006919560113,
      "f1_threshold": 0.7694668769836426,
      "precision": 0.602602835231234,
      "recall": 0.6841688654353562
    },
    "evaluation_time": 8.91,
    "manhattan": {
      "accuracy": 0.8396614412588663,
      "accuracy_threshold": 10.996787071228027,
      "ap": 0.6766935756595975,
      "f1": 0.6382363570654138,
      "f1_threshold": 12.064617156982422,
      "precision": 0.5872312125914432,
      "recall": 0.6989445910290237
    },
    "max": {
      "accuracy": 0.841151576563152,
      "ap": 0.6785805581908406,
      "f1": 0.6408006919560113
    }
  }
}