cshin23 committed
Commit ea8e22d (1 parent: 5f44cc2)

Upload artifacts (small scale)

small_scale/results.jsonl CHANGED
@@ -1,40 +1,40 @@
- {"key": "vtab/caltech101", "dataset": "Caltech-101", "metrics": {"acc1": 0.2706655710764174, "acc5": 0.6312243221035333, "mean_per_class_recall": 0.2676443272110481, "main_metric": 0.2676443272110481}}
- {"key": "cifar10", "dataset": "CIFAR-10", "metrics": {"acc1": 0.5072, "acc5": 0.9584, "mean_per_class_recall": 0.5072, "main_metric": 0.5072}}
- {"key": "vtab/cifar100", "dataset": "CIFAR-100", "metrics": {"acc1": 0.2404, "acc5": 0.5432, "mean_per_class_recall": 0.2404, "main_metric": 0.2404}}
- {"key": "vtab/clevr_count_all", "dataset": "CLEVR Counts", "metrics": {"acc1": 0.1404, "acc5": 0.6305333333333333, "mean_per_class_recall": 0.13868995715368526, "main_metric": 0.1404}}
- {"key": "vtab/clevr_closest_object_distance", "dataset": "CLEVR Distance", "metrics": {"acc1": 0.23206666666666667, "acc5": 0.9186666666666666, "mean_per_class_recall": 0.20381403598840905, "main_metric": 0.23206666666666667}}
- {"key": "country211", "dataset": "Country211", "metrics": {"acc1": 0.016066350710900474, "acc5": 0.060284360189573456, "mean_per_class_recall": 0.016066350710900477, "main_metric": 0.016066350710900474}}
- {"key": "vtab/dtd", "dataset": "Describable Textures", "metrics": {"acc1": 0.08138297872340426, "acc5": 0.2425531914893617, "mean_per_class_recall": 0.08138297872340425, "main_metric": 0.08138297872340426}}
- {"key": "vtab/eurosat", "dataset": "EuroSAT", "metrics": {"acc1": 0.23333333333333334, "acc5": 0.5835185185185185, "mean_per_class_recall": 0.22026093728456045, "main_metric": 0.23333333333333334}}
- {"key": "fgvc_aircraft", "dataset": "FGVC Aircraft", "metrics": {"acc1": 0.012001200120012, "acc5": 0.04950495049504951, "mean_per_class_recall": 0.011862745098039217, "main_metric": 0.011862745098039217}}
- {"key": "food101", "dataset": "Food-101", "metrics": {"acc1": 0.08166336633663367, "acc5": 0.24724752475247525, "mean_per_class_recall": 0.08166336633663368, "main_metric": 0.08166336633663367}}
- {"key": "gtsrb", "dataset": "GTSRB", "metrics": {"acc1": 0.08400633412509897, "acc5": 0.33460015835312745, "mean_per_class_recall": 0.07777214088386991, "main_metric": 0.08400633412509897}}
- {"key": "imagenet1k", "dataset": "ImageNet 1k", "metrics": {"acc1": 0.0593, "acc5": 0.16624, "mean_per_class_recall": 0.05928, "main_metric": 0.0593}}
- {"key": "imagenet_sketch", "dataset": "ImageNet Sketch", "metrics": {"acc1": 0.023423529642948377, "acc5": 0.06783391302639077, "mean_per_class_recall": 0.02346156862745098, "main_metric": 0.023423529642948377}}
- {"key": "imagenetv2", "dataset": "ImageNet v2", "metrics": {"acc1": 0.0506, "acc5": 0.1503, "mean_per_class_recall": 0.0506, "main_metric": 0.0506}}
- {"key": "imagenet-a", "dataset": "ImageNet-A", "metrics": {"acc1": 0.0196, "acc5": 0.0824, "mean_per_class_recall": 0.028184268142942587, "main_metric": 0.0196}}
- {"key": "imagenet-o", "dataset": "ImageNet-O", "metrics": {"acc1": 0.1475, "acc5": 0.3575, "mean_per_class_recall": 0.13922824636663644, "main_metric": 0.1475}}
- {"key": "imagenet-r", "dataset": "ImageNet-R", "metrics": {"acc1": 0.07766666666666666, "acc5": 0.2095, "mean_per_class_recall": 0.07430012433755585, "main_metric": 0.07766666666666666}}
- {"key": "vtab/kitti_closest_vehicle_distance", "dataset": "KITTI Vehicle Distance", "metrics": {"acc1": 0.3080168776371308, "acc5": null, "mean_per_class_recall": 0.22749747304582207, "main_metric": 0.3080168776371308}}
- {"key": "mnist", "dataset": "MNIST", "metrics": {"acc1": 0.1118, "acc5": 0.6075, "mean_per_class_recall": 0.11067492235694876, "main_metric": 0.1118}}
- {"key": "objectnet", "dataset": "ObjectNet", "metrics": {"acc1": 0.05776892430278884, "acc5": 0.17739851405190052, "mean_per_class_recall": 0.05837677793548174, "main_metric": 0.05776892430278884}}
- {"key": "vtab/flowers", "dataset": "Oxford Flowers-102", "metrics": {"acc1": 0.053504634899983736, "acc5": 0.17856562042608554, "mean_per_class_recall": 0.05238308678879251, "main_metric": 0.05238308678879251}}
- {"key": "vtab/pets", "dataset": "Oxford-IIIT Pet", "metrics": {"acc1": 0.09621150177159989, "acc5": 0.27527936767511585, "mean_per_class_recall": 0.09577083891376224, "main_metric": 0.09577083891376224}}
- {"key": "voc2007", "dataset": "Pascal VOC 2007", "metrics": {"acc1": 0.3466880341880342, "acc5": 0.7282986111111112, "mean_per_class_recall": 0.3966012009824842, "main_metric": 0.3466880341880342}}
- {"key": "vtab/pcam", "dataset": "PatchCamelyon", "metrics": {"acc1": 0.505218505859375, "acc5": null, "mean_per_class_recall": 0.5054298090415472, "main_metric": 0.505218505859375}}
  {"key": "renderedsst2", "dataset": "Rendered SST2", "metrics": {"acc1": 0.500823723228995, "acc5": null, "mean_per_class_recall": 0.5, "main_metric": 0.500823723228995}}
- {"key": "vtab/resisc45", "dataset": "RESISC45", "metrics": {"acc1": 0.08619047619047619, "acc5": 0.2704761904761905, "mean_per_class_recall": 0.08790795315159007, "main_metric": 0.08619047619047619}}
- {"key": "cars", "dataset": "Stanford Cars", "metrics": {"acc1": 0.02885213281930108, "acc5": 0.11441363014550429, "mean_per_class_recall": 0.028763787052531877, "main_metric": 0.02885213281930108}}
- {"key": "stl10", "dataset": "STL-10", "metrics": {"acc1": 0.5845, "acc5": 0.96975, "mean_per_class_recall": 0.5845, "main_metric": 0.5845}}
- {"key": "sun397", "dataset": "SUN397", "metrics": {"acc1": 0.12646891148831307, "acc5": 0.3263604097320558, "mean_per_class_recall": 0.10255553291990159, "main_metric": 0.12646891148831307}}
- {"key": "vtab/svhn", "dataset": "SVHN", "metrics": {"acc1": 0.08892901044867856, "acc5": 0.506338352796558, "mean_per_class_recall": 0.10773154393098514, "main_metric": 0.08892901044867856}}
- {"key": "retrieval/flickr_1k_test_image_text_retrieval", "dataset": "Flickr", "metrics": {"image_retrieval_recall@1": 0.03539999946951866, "text_retrieval_recall@1": 0.06599999964237213, "image_retrieval_recall@5": 0.11900000274181366, "text_retrieval_recall@5": 0.17800000309944153, "image_retrieval_recall@10": 0.18320000171661377, "text_retrieval_recall@10": 0.24699999392032623, "mean_recall@1": 0.050699999555945396, "main_metric": 0.050699999555945396}}
- {"key": "retrieval/mscoco_2014_5k_test_image_text_retrieval", "dataset": "MSCOCO", "metrics": {"image_retrieval_recall@1": 0.021791283041238785, "text_retrieval_recall@1": 0.03319999948143959, "image_retrieval_recall@5": 0.07025189697742462, "text_retrieval_recall@5": 0.10360000282526016, "image_retrieval_recall@10": 0.11035585403442383, "text_retrieval_recall@10": 0.1526000052690506, "mean_recall@1": 0.027495641261339188, "main_metric": 0.027495641261339188}}
- {"key": "misc/winogavil", "dataset": "WinoGAViL", "metrics": {"avg_jaccard_score": 0.4091041524664875, "jaccard_score_5": 0.4645454545454545, "jaccard_score_6": 0.4165826733861292, "jaccard_score_10": 0.3338810641627543, "jaccard_score_12": 0.2887182910547397, "jaccard_score_5-6": 0.43995324227882365, "jaccard_score_10-12": 0.311246793799487, "main_metric": 0.311246793799487}}
- {"key": "wilds/iwildcam", "dataset": "iWildCam", "metrics": {"acc1": 0.03951765558178121, "acc5": 0.11032693790750392, "mean_per_class_recall": 0.0034132111438215846, "acc_avg": 0.03951765596866608, "recall-macro_all": 0.0034132111438215846, "F1-macro_all": 0.0021830244799491373, "main_metric": 0.0021830244799491373}}
- {"key": "wilds/camelyon17", "dataset": "Camelyon17", "metrics": {"acc1": 0.5, "acc5": null, "mean_per_class_recall": 0.5, "acc_avg": 0.5, "acc_slide:0": NaN, "count_slide:0": 0.0, "acc_slide:1": NaN, "count_slide:1": 0.0, "acc_slide:2": NaN, "count_slide:2": 0.0, "acc_slide:3": NaN, "count_slide:3": 0.0, "acc_slide:4": NaN, "count_slide:4": 0.0, "acc_slide:5": NaN, "count_slide:5": 0.0, "acc_slide:6": NaN, "count_slide:6": 0.0, "acc_slide:7": NaN, "count_slide:7": 0.0, "acc_slide:8": NaN, "count_slide:8": 0.0, "acc_slide:9": NaN, "count_slide:9": 0.0, "acc_slide:10": NaN, "count_slide:10": 0.0, "acc_slide:11": NaN, "count_slide:11": 0.0, "acc_slide:12": NaN, "count_slide:12": 0.0, "acc_slide:13": NaN, "count_slide:13": 0.0, "acc_slide:14": NaN, "count_slide:14": 0.0, "acc_slide:15": NaN, "count_slide:15": 0.0, "acc_slide:16": NaN, "count_slide:16": 0.0, "acc_slide:17": NaN, "count_slide:17": 0.0, "acc_slide:18": NaN, "count_slide:18": 0.0, "acc_slide:19": NaN, "count_slide:19": 0.0, "acc_slide:20": 0.007086614146828651, "count_slide:20": 3810.0, "acc_slide:21": 0.002707092557102442, "count_slide:21": 3694.0, "acc_slide:22": 0.5865464806556702, "count_slide:22": 7210.0, "acc_slide:23": 0.5221255421638489, "count_slide:23": 5288.0, "acc_slide:24": 0.023812605068087578, "count_slide:24": 7727.0, "acc_slide:25": 0.23742501437664032, "count_slide:25": 4334.0, "acc_slide:26": 0.12975098192691803, "count_slide:26": 3815.0, "acc_slide:27": 0.024143986403942108, "count_slide:27": 4556.0, "acc_slide:28": 0.8469163775444031, "count_slide:28": 31878.0, "acc_slide:29": 0.5245644450187683, "count_slide:29": 12742.0, "acc_wg": 0.002707092557102442, "main_metric": 0.5}}
- {"key": "wilds/fmow", "dataset": "FMoW", "metrics": {"acc1": 0.022797177492310475, "acc5": 0.10828659308847476, "mean_per_class_recall": 0.025020635806308363, "acc_avg": 0.022797176614403725, "acc_year:0": NaN, "count_year:0": 0.0, "acc_year:1": NaN, "count_year:1": 0.0, "acc_year:2": NaN, "count_year:2": 0.0, "acc_year:3": NaN, "count_year:3": 0.0, "acc_year:4": NaN, "count_year:4": 0.0, "acc_year:5": NaN, "count_year:5": 0.0, "acc_year:6": NaN, "count_year:6": 0.0, "acc_year:7": NaN, "count_year:7": 0.0, "acc_year:8": NaN, "count_year:8": 0.0, "acc_year:9": NaN, "count_year:9": 0.0, "acc_year:10": NaN, "count_year:10": 0.0, "acc_year:11": NaN, "count_year:11": 0.0, "acc_year:12": NaN, "count_year:12": 0.0, "acc_year:13": NaN, "count_year:13": 0.0, "acc_year:14": 0.024876246228814125, "count_year:14": 15959.0, "acc_year:15": 0.017401203513145447, "count_year:15": 6149.0, "acc_worst_year": 0.017401203513145447, "acc_region:0": 0.01652226410806179, "count_region:0": 4963.0, "acc_region:1": 0.033287812024354935, "count_region:1": 5858.0, "acc_region:2": 0.011569610796868801, "count_region:2": 2593.0, "acc_region:3": 0.02068793587386608, "count_region:3": 8024.0, "acc_region:4": 0.04654654487967491, "count_region:4": 666.0, "acc_region:5": 0.0, "count_region:5": 4.0, "acc_worst_region": 0.0, "main_metric": 0.0}}
- {"key": "fairness/dollar_street", "dataset": "Dollar Street", "metrics": {"acc1": 0.15415358264344847, "acc5": 0.39366257493576934, "mean_per_class_recall": 0.1483581370529709, "acc_top5_avg": 0.39366257190704346, "acc_top5_income_ds:0": 0.2535046637058258, "count_income_ds:0": 856.0, "acc_top5_income_ds:1": 0.3484162986278534, "count_income_ds:1": 884.0, "acc_top5_income_ds:2": 0.4461709260940552, "count_income_ds:2": 901.0, "acc_top5_income_ds:3": 0.5243619680404663, "count_income_ds:3": 862.0, "acc_top5_wg": 0.2535046637058258, "main_metric": 0.2535046637058258}}
- {"key": "fairness/geode", "dataset": "GeoDE", "metrics": {"acc1": 0.37083600256245997, "acc5": 0.7090807174887892, "mean_per_class_recall": 0.3650473412141806, "acc_avg": 0.3708359897136688, "acc_region:0": 0.3315240144729614, "count_region:0": 2395.0, "acc_region:1": 0.37960198521614075, "count_region:1": 2010.0, "acc_region:2": 0.37253057956695557, "count_region:2": 2126.0, "acc_region:3": 0.3430919349193573, "count_region:3": 1947.0, "acc_region:4": 0.40466704964637756, "count_region:4": 1757.0, "acc_region:5": 0.4007989466190338, "count_region:5": 2253.0, "acc_wg": 0.3315240144729614, "main_metric": 0.3315240144729614}}
- {"key": "fairness/fairface", "dataset": "FairFace", "metrics": {"acc_race_avg": 0.7731422185897827, "acc_race_race_binary:0": 0.1088729053735733, "count_race_binary:0": 2085.0, "acc_race_race_binary:1": 0.929304301738739, "count_race_binary:1": 8869.0, "acc_race_wg": 0.1088729053735733, "acc_gender_avg": 0.5082162022590637, "acc_gender_race_binary:0": 0.4853717088699341, "acc_gender_race_binary:1": 0.5135866403579712, "acc_gender_wg": 0.4853717088699341, "acc_age_avg": 0.14798247814178467, "acc_age_race_binary:0": 0.16163069009780884, "acc_age_race_binary:1": 0.144773930311203, "acc_age_wg": 0.144773930311203, "acc_gender_x_avg": 0.5082162022590637, "acc_gender_x_race:0_gender:0": 0.24030037224292755, "count_race:0_gender:0": 799.0, "acc_gender_x_race:0_gender:1": 0.8401585221290588, "count_race:0_gender:1": 757.0, "acc_gender_x_race:1_gender:0": 0.08645275980234146, "count_race:1_gender:0": 1122.0, "acc_gender_x_race:1_gender:1": 0.9501557350158691, "count_race:1_gender:1": 963.0, "acc_gender_x_race:2_gender:0": 0.14873838424682617, "count_race:2_gender:0": 753.0, "acc_gender_x_race:2_gender:1": 0.9384010434150696, "count_race:2_gender:1": 763.0, "acc_gender_x_race:3_gender:0": 0.11349306255578995, "count_race:3_gender:0": 793.0, "acc_gender_x_race:3_gender:1": 0.9614458084106445, "count_race:3_gender:1": 830.0, "acc_gender_x_race:4_gender:0": 0.1045510470867157, "count_race:4_gender:0": 813.0, "acc_gender_x_race:4_gender:1": 0.9595959782600403, "count_race:4_gender:1": 396.0, "acc_gender_x_race:5_gender:0": 0.12244898080825806, "count_race:5_gender:0": 735.0, "acc_gender_x_race:5_gender:1": 0.9544117450714111, "count_race:5_gender:1": 680.0, "acc_gender_x_race:6_gender:0": 0.08365508168935776, "count_race:6_gender:0": 777.0, "acc_gender_x_race:6_gender:1": 0.9598965048789978, "count_race:6_gender:1": 773.0, "acc_gender_x_wg": 0.08365508168935776, "toxicity_crime_avg": 0.14779989421367645, "toxicity_crime_race:0": 0.10475578159093857, "count_race:0": 1556.0, "toxicity_crime_race:1": 0.2028777003288269, "count_race:1": 2085.0, "toxicity_crime_race:2": 0.10488126426935196, "count_race:2": 1516.0, "toxicity_crime_race:3": 0.16574245691299438, "count_race:3": 1623.0, "toxicity_crime_race:4": 0.14143919944763184, "count_race:4": 1209.0, "toxicity_crime_race:5": 0.14275617897510529, "count_race:5": 1415.0, "toxicity_crime_race:6": 0.14967742562294006, "count_race:6": 1550.0, "toxicity_crime_wg": 0.10475578159093857, "toxicity_nonhuman_avg": 0.3110279440879822, "toxicity_nonhuman_race:0": 0.5134961605072021, "toxicity_nonhuman_race:1": 0.21870504319667816, "toxicity_nonhuman_race:2": 0.3924802243709564, "toxicity_nonhuman_race:3": 0.25446704030036926, "toxicity_nonhuman_race:4": 0.28535979986190796, "toxicity_nonhuman_race:5": 0.3024734854698181, "toxicity_nonhuman_race:6": 0.23935483396053314, "toxicity_nonhuman_wg": 0.21870504319667816, "main_metric": null}}
- {"key": "fairness/utkface", "dataset": "UTKFace", "metrics": {"acc_race_avg": 0.5751170516014099, "acc_race_race_binary:0": 0.21288210153579712, "count_race_binary:0": 10076.0, "acc_race_race_binary:1": 0.8429588079452515, "count_race_binary:1": 13627.0, "acc_race_wg": 0.21288210153579712, "acc_gender_avg": 0.49630847573280334, "acc_gender_race_binary:0": 0.46963080763816833, "acc_gender_race_binary:1": 0.5160343647003174, "acc_gender_wg": 0.46963080763816833, "acc_age_avg": 0.2622452974319458, "acc_age_race_binary:0": 0.17993251979351044, "acc_age_race_binary:1": 0.3231085240840912, "acc_age_wg": 0.17993251979351044, "acc_gender_x_avg": 0.49630847573280334, "acc_gender_x_race:0_gender:0": 0.1389128565788269, "count_race:0_gender:0": 2318.0, "acc_gender_x_race:0_gender:1": 0.9384058117866516, "count_race:0_gender:1": 2208.0, "acc_gender_x_race:1_gender:0": 0.030496712774038315, "count_race:1_gender:0": 5476.0, "acc_gender_x_race:1_gender:1": 0.9923912882804871, "count_race:1_gender:1": 4600.0, "acc_gender_x_race:2_gender:0": 0.06413091719150543, "count_race:2_gender:0": 2261.0, "acc_gender_x_race:2_gender:1": 0.9719953536987305, "count_race:2_gender:1": 1714.0, "acc_gender_x_race:3_gender:0": 0.02539682574570179, "count_race:3_gender:0": 1575.0, "acc_gender_x_race:3_gender:1": 0.9860140085220337, "count_race:3_gender:1": 1859.0, "acc_gender_x_race:4_gender:0": 0.04210526496171951, "count_race:4_gender:0": 760.0, "acc_gender_x_race:4_gender:1": 0.9892703890800476, "count_race:4_gender:1": 932.0, "acc_gender_x_wg": 0.02539682574570179, "toxicity_crime_avg": 0.10007172077894211, "toxicity_crime_race:0": 0.0720282793045044, "count_race:0": 4526.0, "toxicity_crime_race:1": 0.11780468374490738, "count_race:1": 10076.0, "toxicity_crime_race:2": 0.09685534238815308, "count_race:2": 3975.0, "toxicity_crime_race:3": 0.08474082499742508, "count_race:3": 3434.0, "toxicity_crime_race:4": 0.10815602540969849, "count_race:4": 1692.0, "toxicity_crime_wg": 0.0720282793045044, "toxicity_nonhuman_avg": 0.2186221182346344, "toxicity_nonhuman_race:0": 0.45802032947540283, "toxicity_nonhuman_race:1": 0.1283247321844101, "toxicity_nonhuman_race:2": 0.28050315380096436, "toxicity_nonhuman_race:3": 0.13249854743480682, "toxicity_nonhuman_race:4": 0.14539006352424622, "toxicity_nonhuman_wg": 0.1283247321844101, "main_metric": null}}
 
+ {"key": "vtab/caltech101", "dataset": "Caltech-101", "metrics": {"acc1": 0.2933442892358258, "acc5": 0.6044371405094494, "mean_per_class_recall": 0.2798245455230044, "main_metric": 0.2798245455230044}}
+ {"key": "cifar10", "dataset": "CIFAR-10", "metrics": {"acc1": 0.5188, "acc5": 0.9547, "mean_per_class_recall": 0.5188, "main_metric": 0.5188}}
+ {"key": "vtab/cifar100", "dataset": "CIFAR-100", "metrics": {"acc1": 0.229, "acc5": 0.5233, "mean_per_class_recall": 0.22900000000000006, "main_metric": 0.229}}
+ {"key": "vtab/clevr_count_all", "dataset": "CLEVR Counts", "metrics": {"acc1": 0.1188, "acc5": 0.6186, "mean_per_class_recall": 0.1164349748755153, "main_metric": 0.1188}}
+ {"key": "vtab/clevr_closest_object_distance", "dataset": "CLEVR Distance", "metrics": {"acc1": 0.2338, "acc5": 0.9186666666666666, "mean_per_class_recall": 0.17769985051263218, "main_metric": 0.2338}}
+ {"key": "country211", "dataset": "Country211", "metrics": {"acc1": 0.014976303317535545, "acc5": 0.06132701421800948, "mean_per_class_recall": 0.014976303317535546, "main_metric": 0.014976303317535545}}
+ {"key": "vtab/dtd", "dataset": "Describable Textures", "metrics": {"acc1": 0.07925531914893617, "acc5": 0.22180851063829787, "mean_per_class_recall": 0.07925531914893619, "main_metric": 0.07925531914893617}}
+ {"key": "vtab/eurosat", "dataset": "EuroSAT", "metrics": {"acc1": 0.22166666666666668, "acc5": 0.6512962962962963, "mean_per_class_recall": 0.21582723800529266, "main_metric": 0.22166666666666668}}
+ {"key": "fgvc_aircraft", "dataset": "FGVC Aircraft", "metrics": {"acc1": 0.009900990099009901, "acc5": 0.04950495049504951, "mean_per_class_recall": 0.00999108734402852, "main_metric": 0.00999108734402852}}
+ {"key": "food101", "dataset": "Food-101", "metrics": {"acc1": 0.09136633663366336, "acc5": 0.2671683168316832, "mean_per_class_recall": 0.09136633663366335, "main_metric": 0.09136633663366336}}
+ {"key": "gtsrb", "dataset": "GTSRB", "metrics": {"acc1": 0.06476642913697546, "acc5": 0.2724465558194774, "mean_per_class_recall": 0.08132315750010392, "main_metric": 0.06476642913697546}}
+ {"key": "imagenet1k", "dataset": "ImageNet 1k", "metrics": {"acc1": 0.05624, "acc5": 0.16116, "mean_per_class_recall": 0.05622000000000001, "main_metric": 0.05624}}
+ {"key": "imagenet_sketch", "dataset": "ImageNet Sketch", "metrics": {"acc1": 0.022480300261353142, "acc5": 0.06608500854801627, "mean_per_class_recall": 0.022519607843137255, "main_metric": 0.022480300261353142}}
+ {"key": "imagenetv2", "dataset": "ImageNet v2", "metrics": {"acc1": 0.048, "acc5": 0.1368, "mean_per_class_recall": 0.048, "main_metric": 0.048}}
+ {"key": "imagenet-a", "dataset": "ImageNet-A", "metrics": {"acc1": 0.018133333333333335, "acc5": 0.0808, "mean_per_class_recall": 0.022413867778762295, "main_metric": 0.018133333333333335}}
+ {"key": "imagenet-o", "dataset": "ImageNet-O", "metrics": {"acc1": 0.1455, "acc5": 0.3605, "mean_per_class_recall": 0.14156373143540327, "main_metric": 0.1455}}
+ {"key": "imagenet-r", "dataset": "ImageNet-R", "metrics": {"acc1": 0.07443333333333334, "acc5": 0.2094, "mean_per_class_recall": 0.07055881755613569, "main_metric": 0.07443333333333334}}
+ {"key": "vtab/kitti_closest_vehicle_distance", "dataset": "KITTI Vehicle Distance", "metrics": {"acc1": 0.3727144866385373, "acc5": null, "mean_per_class_recall": 0.250518025606469, "main_metric": 0.3727144866385373}}
+ {"key": "mnist", "dataset": "MNIST", "metrics": {"acc1": 0.1037, "acc5": 0.4936, "mean_per_class_recall": 0.10041414814055938, "main_metric": 0.1037}}
+ {"key": "objectnet", "dataset": "ObjectNet", "metrics": {"acc1": 0.054861634542909446, "acc5": 0.17298374071282438, "mean_per_class_recall": 0.053738084282959765, "main_metric": 0.054861634542909446}}
+ {"key": "vtab/flowers", "dataset": "Oxford Flowers-102", "metrics": {"acc1": 0.044722719141323794, "acc5": 0.1709221011546593, "mean_per_class_recall": 0.05583704841146801, "main_metric": 0.05583704841146801}}
+ {"key": "vtab/pets", "dataset": "Oxford-IIIT Pet", "metrics": {"acc1": 0.07413464159171436, "acc5": 0.2632869991823385, "mean_per_class_recall": 0.07397611110051747, "main_metric": 0.07397611110051747}}
+ {"key": "voc2007", "dataset": "Pascal VOC 2007", "metrics": {"acc1": 0.3168402777777778, "acc5": 0.6730101495726496, "mean_per_class_recall": 0.38946711399131057, "main_metric": 0.3168402777777778}}
+ {"key": "vtab/pcam", "dataset": "PatchCamelyon", "metrics": {"acc1": 0.594390869140625, "acc5": null, "mean_per_class_recall": 0.5945103713535078, "main_metric": 0.594390869140625}}
  {"key": "renderedsst2", "dataset": "Rendered SST2", "metrics": {"acc1": 0.500823723228995, "acc5": null, "mean_per_class_recall": 0.5, "main_metric": 0.500823723228995}}
+ {"key": "vtab/resisc45", "dataset": "RESISC45", "metrics": {"acc1": 0.09603174603174604, "acc5": 0.30063492063492064, "mean_per_class_recall": 0.09782188004490229, "main_metric": 0.09603174603174604}}
+ {"key": "cars", "dataset": "Stanford Cars", "metrics": {"acc1": 0.025743066782738466, "acc5": 0.12063176221862952, "mean_per_class_recall": 0.02578821372063046, "main_metric": 0.025743066782738466}}
+ {"key": "stl10", "dataset": "STL-10", "metrics": {"acc1": 0.604125, "acc5": 0.957375, "mean_per_class_recall": 0.604125, "main_metric": 0.604125}}
+ {"key": "sun397", "dataset": "SUN397", "metrics": {"acc1": 0.11487393567133163, "acc5": 0.29894992368096807, "mean_per_class_recall": 0.09092600071173157, "main_metric": 0.11487393567133163}}
+ {"key": "vtab/svhn", "dataset": "SVHN", "metrics": {"acc1": 0.08163030116779349, "acc5": 0.47453134603564845, "mean_per_class_recall": 0.09078134829854731, "main_metric": 0.08163030116779349}}
+ {"key": "retrieval/flickr_1k_test_image_text_retrieval", "dataset": "Flickr", "metrics": {"image_retrieval_recall@1": 0.03759999945759773, "text_retrieval_recall@1": 0.06599999964237213, "image_retrieval_recall@5": 0.11699999868869781, "text_retrieval_recall@5": 0.17599999904632568, "image_retrieval_recall@10": 0.17499999701976776, "text_retrieval_recall@10": 0.24899999797344208, "mean_recall@1": 0.05179999954998493, "main_metric": 0.05179999954998493}}
+ {"key": "retrieval/mscoco_2014_5k_test_image_text_retrieval", "dataset": "MSCOCO", "metrics": {"image_retrieval_recall@1": 0.019232306629419327, "text_retrieval_recall@1": 0.03099999949336052, "image_retrieval_recall@5": 0.06629348546266556, "text_retrieval_recall@5": 0.10019999742507935, "image_retrieval_recall@10": 0.10475809872150421, "text_retrieval_recall@10": 0.14720000326633453, "mean_recall@1": 0.025116153061389923, "main_metric": 0.025116153061389923}}
+ {"key": "misc/winogavil", "dataset": "WinoGAViL", "metrics": {"avg_jaccard_score": 0.4084185344078692, "jaccard_score_5": 0.46732323232323225, "jaccard_score_6": 0.4082313414926806, "jaccard_score_10": 0.34342723004694836, "jaccard_score_12": 0.2920449488206498, "jaccard_score_5-6": 0.4370247323735696, "jaccard_score_10-12": 0.31767592282814766, "main_metric": 0.31767592282814766}}
+ {"key": "wilds/iwildcam", "dataset": "iWildCam", "metrics": {"acc1": 0.07700217335421, "acc5": 0.20069640812320347, "mean_per_class_recall": 0.005201187561625383, "acc_avg": 0.07700217515230179, "recall-macro_all": 0.005201187561625383, "F1-macro_all": 0.004537945224082067, "main_metric": 0.004537945224082067}}
+ {"key": "wilds/camelyon17", "dataset": "Camelyon17", "metrics": {"acc1": 0.5951748301079315, "acc5": null, "mean_per_class_recall": 0.5951748301079315, "acc_avg": 0.5951748490333557, "acc_slide:0": NaN, "count_slide:0": 0.0, "acc_slide:1": NaN, "count_slide:1": 0.0, "acc_slide:2": NaN, "count_slide:2": 0.0, "acc_slide:3": NaN, "count_slide:3": 0.0, "acc_slide:4": NaN, "count_slide:4": 0.0, "acc_slide:5": NaN, "count_slide:5": 0.0, "acc_slide:6": NaN, "count_slide:6": 0.0, "acc_slide:7": NaN, "count_slide:7": 0.0, "acc_slide:8": NaN, "count_slide:8": 0.0, "acc_slide:9": NaN, "count_slide:9": 0.0, "acc_slide:10": NaN, "count_slide:10": 0.0, "acc_slide:11": NaN, "count_slide:11": 0.0, "acc_slide:12": NaN, "count_slide:12": 0.0, "acc_slide:13": NaN, "count_slide:13": 0.0, "acc_slide:14": NaN, "count_slide:14": 0.0, "acc_slide:15": NaN, "count_slide:15": 0.0, "acc_slide:16": NaN, "count_slide:16": 0.0, "acc_slide:17": NaN, "count_slide:17": 0.0, "acc_slide:18": NaN, "count_slide:18": 0.0, "acc_slide:19": NaN, "count_slide:19": 0.0, "acc_slide:20": 0.36824145913124084, "count_slide:20": 3810.0, "acc_slide:21": 0.1922035664319992, "count_slide:21": 3694.0, "acc_slide:22": 0.6735090017318726, "count_slide:22": 7210.0, "acc_slide:23": 0.5633509755134583, "count_slide:23": 5288.0, "acc_slide:24": 0.09809757769107819, "count_slide:24": 7727.0, "acc_slide:25": 0.3996308147907257, "count_slide:25": 4334.0, "acc_slide:26": 0.20629096031188965, "count_slide:26": 3815.0, "acc_slide:27": 0.21861281991004944, "count_slide:27": 4556.0, "acc_slide:28": 0.8679653406143188, "count_slide:28": 31878.0, "acc_slide:29": 0.6852927207946777, "count_slide:29": 12742.0, "acc_wg": 0.09809757769107819, "main_metric": 0.5951748301079315}}
+ {"key": "wilds/fmow", "dataset": "FMoW", "metrics": {"acc1": 0.02347566491767686, "acc5": 0.12045413425004524, "mean_per_class_recall": 0.02324508831811209, "acc_avg": 0.023475665599107742, "acc_year:0": NaN, "count_year:0": 0.0, "acc_year:1": NaN, "count_year:1": 0.0, "acc_year:2": NaN, "count_year:2": 0.0, "acc_year:3": NaN, "count_year:3": 0.0, "acc_year:4": NaN, "count_year:4": 0.0, "acc_year:5": NaN, "count_year:5": 0.0, "acc_year:6": NaN, "count_year:6": 0.0, "acc_year:7": NaN, "count_year:7": 0.0, "acc_year:8": NaN, "count_year:8": 0.0, "acc_year:9": NaN, "count_year:9": 0.0, "acc_year:10": NaN, "count_year:10": 0.0, "acc_year:11": NaN, "count_year:11": 0.0, "acc_year:12": NaN, "count_year:12": 0.0, "acc_year:13": NaN, "count_year:13": 0.0, "acc_year:14": 0.023873675614595413, "count_year:14": 15959.0, "acc_year:15": 0.02244267426431179, "count_year:15": 6149.0, "acc_worst_year": 0.02244267426431179, "acc_region:0": 0.02075357735157013, "count_region:0": 4963.0, "acc_region:1": 0.02424035593867302, "count_region:1": 5858.0, "acc_region:2": 0.009641341865062714, "count_region:2": 2593.0, "acc_region:3": 0.02853938192129135, "count_region:3": 8024.0, "acc_region:4": 0.030030030757188797, "count_region:4": 666.0, "acc_region:5": 0.0, "count_region:5": 4.0, "acc_worst_region": 0.0, "main_metric": 0.0}}
+ {"key": "fairness/dollar_street", "dataset": "Dollar Street", "metrics": {"acc1": 0.15786468741079074, "acc5": 0.39166428775335427, "mean_per_class_recall": 0.15546455205609477, "acc_top5_avg": 0.3916642963886261, "acc_top5_income_ds:0": 0.2967289686203003, "count_income_ds:0": 856.0, "acc_top5_income_ds:1": 0.33710408210754395, "count_income_ds:1": 884.0, "acc_top5_income_ds:2": 0.4295227527618408, "count_income_ds:2": 901.0, "acc_top5_income_ds:3": 0.5023201704025269, "count_income_ds:3": 862.0, "acc_top5_wg": 0.2967289686203003, "main_metric": 0.2967289686203003}}
+ {"key": "fairness/geode", "dataset": "GeoDE", "metrics": {"acc1": 0.3595451633568226, "acc5": 0.6993914157591288, "mean_per_class_recall": 0.3549929388717773, "acc_avg": 0.35954517126083374, "acc_region:0": 0.329436331987381, "count_region:0": 2395.0, "acc_region:1": 0.37412935495376587, "count_region:1": 2010.0, "acc_region:2": 0.3532455265522003, "count_region:2": 2126.0, "acc_region:3": 0.3307652771472931, "count_region:3": 1947.0, "acc_region:4": 0.38702332973480225, "count_region:4": 1757.0, "acc_region:5": 0.38792720437049866, "count_region:5": 2253.0, "acc_wg": 0.329436331987381, "main_metric": 0.329436331987381}}
+ {"key": "fairness/fairface", "dataset": "FairFace", "metrics": {"acc_race_avg": 0.7111557126045227, "acc_race_race_binary:0": 0.2412469983100891, "count_race_binary:0": 2085.0, "acc_race_race_binary:1": 0.8216258883476257, "count_race_binary:1": 8869.0, "acc_race_wg": 0.2412469983100891, "acc_gender_avg": 0.5114113688468933, "acc_gender_race_binary:0": 0.49256595969200134, "acc_gender_race_binary:1": 0.5158417224884033, "acc_gender_wg": 0.49256595969200134, "acc_age_avg": 0.04865802451968193, "acc_age_race_binary:0": 0.05275779217481613, "acc_age_race_binary:1": 0.047694217413663864, "acc_age_wg": 0.047694217413663864, "acc_gender_x_avg": 0.5114113688468933, "acc_gender_x_race:0_gender:0": 0.23028786480426788, "count_race:0_gender:0": 799.0, "acc_gender_x_race:0_gender:1": 0.8546895384788513, "count_race:0_gender:1": 757.0, "acc_gender_x_race:1_gender:0": 0.11764705926179886, "count_race:1_gender:0": 1122.0, "acc_gender_x_race:1_gender:1": 0.9293873310089111, "count_race:1_gender:1": 963.0, "acc_gender_x_race:2_gender:0": 0.175298810005188, "count_race:2_gender:0": 753.0, "acc_gender_x_race:2_gender:1": 0.9069462418556213, "count_race:2_gender:1": 763.0, "acc_gender_x_race:3_gender:0": 0.12358133494853973, "count_race:3_gender:0": 793.0, "acc_gender_x_race:3_gender:1": 0.9518072009086609, "count_race:3_gender:1": 830.0, "acc_gender_x_race:4_gender:0": 0.14022140204906464, "count_race:4_gender:0": 813.0, "acc_gender_x_race:4_gender:1": 0.9318181872367859, "count_race:4_gender:1": 396.0, "acc_gender_x_race:5_gender:0": 0.13333334028720856, "count_race:5_gender:0": 735.0, "acc_gender_x_race:5_gender:1": 0.9191176295280457, "count_race:5_gender:1": 680.0, "acc_gender_x_race:6_gender:0": 0.12998713552951813, "count_race:6_gender:0": 777.0, "acc_gender_x_race:6_gender:1": 0.9379042983055115, "count_race:6_gender:1": 773.0, "acc_gender_x_wg": 0.11764705926179886, "toxicity_crime_avg": 0.05587000027298927, "toxicity_crime_race:0": 0.04884318634867668, "count_race:0": 1556.0, "toxicity_crime_race:1": 0.06043165549635887, "count_race:1": 2085.0, "toxicity_crime_race:2": 0.0428759902715683, "count_race:2": 1516.0, "toxicity_crime_race:3": 0.07516943663358688, "count_race:3": 1623.0, "toxicity_crime_race:4": 0.047146402299404144, "count_race:4": 1209.0, "toxicity_crime_race:5": 0.058657243847846985, "count_race:5": 1415.0, "toxicity_crime_race:6": 0.05354838818311691, "count_race:6": 1550.0, "toxicity_crime_wg": 0.0428759902715683, "toxicity_nonhuman_avg": 0.48603248596191406, "toxicity_nonhuman_race:0": 0.6979434490203857, "toxicity_nonhuman_race:1": 0.40239807963371277, "toxicity_nonhuman_race:2": 0.5897097587585449, "toxicity_nonhuman_race:3": 0.4220579266548157, "toxicity_nonhuman_race:4": 0.46650123596191406, "toxicity_nonhuman_race:5": 0.4848056435585022, "toxicity_nonhuman_race:6": 0.3677419424057007, "toxicity_nonhuman_wg": 0.3677419424057007, "main_metric": null}}
+ {"key": "fairness/utkface", "dataset": "UTKFace", "metrics": {"acc_race_avg": 0.5497194528579712, "acc_race_race_binary:0": 0.41782453656196594, "count_race_binary:0": 10076.0, "acc_race_race_binary:1": 0.6472444534301758, "count_race_binary:1": 13627.0, "acc_race_wg": 0.41782453656196594, "acc_gender_avg": 0.49542251229286194, "acc_gender_race_binary:0": 0.4691345691680908, "acc_gender_race_binary:1": 0.514860212802887, "acc_gender_wg": 0.4691345691680908, "acc_age_avg": 0.08644475042819977, "acc_age_race_binary:0": 0.08545057475566864, "acc_age_race_binary:1": 0.08717986196279526, "acc_age_wg": 0.08545057475566864, "acc_gender_x_avg": 0.49542251229286194, "acc_gender_x_race:0_gender:0": 0.11087144166231155, "count_race:0_gender:0": 2318.0, "acc_gender_x_race:0_gender:1": 0.9678441882133484, "count_race:0_gender:1": 2208.0, "acc_gender_x_race:1_gender:0": 0.03360116854310036, "count_race:1_gender:0": 5476.0, "acc_gender_x_race:1_gender:1": 0.9876086711883545, "count_race:1_gender:1": 4600.0, "acc_gender_x_race:2_gender:0": 0.05130473151803017, "count_race:2_gender:0": 2261.0, "acc_gender_x_race:2_gender:1": 0.9743290543556213, "count_race:2_gender:1": 1714.0, "acc_gender_x_race:3_gender:0": 0.03809523954987526, "count_race:3_gender:0": 1575.0, "acc_gender_x_race:3_gender:1": 0.9827864170074463, "count_race:3_gender:1": 1859.0, "acc_gender_x_race:4_gender:0": 0.03421052545309067, "count_race:4_gender:0": 760.0, "acc_gender_x_race:4_gender:1": 0.9903433322906494, "count_race:4_gender:1": 932.0, "acc_gender_x_wg": 0.03360116854310036, "toxicity_crime_avg": 0.033455681055784225, "toxicity_crime_race:0": 0.028060097247362137, "count_race:0": 4526.0, "toxicity_crime_race:1": 0.0413854718208313, "count_race:1": 10076.0, "toxicity_crime_race:2": 0.02767295576632023, "count_race:2": 3975.0, "toxicity_crime_race:3": 0.018928363919258118, "count_race:3": 3434.0, "toxicity_crime_race:4": 0.04373522475361824, "count_race:4": 1692.0, "toxicity_crime_wg": 0.018928363919258118, "toxicity_nonhuman_avg": 0.32219550013542175, "toxicity_nonhuman_race:0": 0.5627485513687134, "toxicity_nonhuman_race:1": 0.2386859804391861, "toxicity_nonhuman_race:2": 0.41811320185661316, "toxicity_nonhuman_race:3": 0.18491555750370026, "toxicity_nonhuman_race:4": 0.2293144166469574, "toxicity_nonhuman_wg": 0.18491555750370026, "main_metric": null}}
small_scale/samples/baseline+dino_ws_0.3.npy ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e7e1abd35f8e2530d0635624330e4bcf784fe387bced1578a337b6bc5a11f50e
+ size 65897088
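The .npy sample file is tracked with Git LFS, so only the pointer above (spec version, sha256 oid, and size in bytes) lives in the repository; the actual array is fetched from LFS storage. Not part of this commit: a minimal sketch that checks a locally downloaded copy against that pointer, assuming the file has already been fetched to the same relative path.

import hashlib
from pathlib import Path

# Values copied from the LFS pointer in this commit.
EXPECTED_OID = "e7e1abd35f8e2530d0635624330e4bcf784fe387bced1578a337b6bc5a11f50e"
EXPECTED_SIZE = 65897088

# Assumption: the real array has been downloaded locally (e.g. via `git lfs pull`).
data = Path("small_scale/samples/baseline+dino_ws_0.3.npy").read_bytes()
assert len(data) == EXPECTED_SIZE, f"size mismatch: {len(data)} bytes"
assert hashlib.sha256(data).hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("local file matches the LFS pointer")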