clip-datacomp-models / top5_results /top5_txt_highq_clusters_just_english.jsonl
snats's picture
added the evals from the top5 method
bd5a6f0 verified
{"key": "vtab/caltech101", "dataset": "Caltech-101", "metrics": {"acc1": 0.16729663105998357, "acc5": 0.3148726376335251, "mean_per_class_recall": 0.11791041009168703, "main_metric": 0.11791041009168703}}
{"key": "cifar10", "dataset": "CIFAR-10", "metrics": {"acc1": 0.2766, "acc5": 0.8267, "mean_per_class_recall": 0.2766, "main_metric": 0.2766}}
{"key": "vtab/cifar100", "dataset": "CIFAR-100", "metrics": {"acc1": 0.0848, "acc5": 0.2625, "mean_per_class_recall": 0.08479999999999999, "main_metric": 0.0848}}
{"key": "vtab/clevr_count_all", "dataset": "CLEVR Counts", "metrics": {"acc1": 0.12286666666666667, "acc5": 0.6321333333333333, "mean_per_class_recall": 0.12209007173122884, "main_metric": 0.12286666666666667}}
{"key": "vtab/clevr_closest_object_distance", "dataset": "CLEVR Distance", "metrics": {"acc1": 0.22226666666666667, "acc5": 0.9186666666666666, "mean_per_class_recall": 0.1680678195584545, "main_metric": 0.22226666666666667}}
{"key": "country211", "dataset": "Country211", "metrics": {"acc1": 0.007535545023696682, "acc5": 0.034454976303317536, "mean_per_class_recall": 0.007535545023696682, "main_metric": 0.007535545023696682}}
{"key": "vtab/dtd", "dataset": "Describable Textures", "metrics": {"acc1": 0.027659574468085105, "acc5": 0.11648936170212766, "mean_per_class_recall": 0.0276595744680851, "main_metric": 0.027659574468085105}}
{"key": "vtab/eurosat", "dataset": "EuroSAT", "metrics": {"acc1": 0.08814814814814814, "acc5": 0.42814814814814817, "mean_per_class_recall": 0.11317936992869304, "main_metric": 0.08814814814814814}}
{"key": "fgvc_aircraft", "dataset": "FGVC Aircraft", "metrics": {"acc1": 0.013201320132013201, "acc5": 0.052805280528052806, "mean_per_class_recall": 0.013306595365418894, "main_metric": 0.013306595365418894}}
{"key": "food101", "dataset": "Food-101", "metrics": {"acc1": 0.041782178217821785, "acc5": 0.15188118811881188, "mean_per_class_recall": 0.04178217821782178, "main_metric": 0.041782178217821785}}
{"key": "gtsrb", "dataset": "GTSRB", "metrics": {"acc1": 0.021377672209026127, "acc5": 0.17197149643705464, "mean_per_class_recall": 0.029033873524520644, "main_metric": 0.021377672209026127}}
{"key": "imagenet1k", "dataset": "ImageNet 1k", "metrics": {"acc1": 0.0154, "acc5": 0.05574, "mean_per_class_recall": 0.01538, "main_metric": 0.0154}}
{"key": "imagenet_sketch", "dataset": "ImageNet Sketch", "metrics": {"acc1": 0.006524003222700387, "acc5": 0.022814360667334787, "mean_per_class_recall": 0.006523529411764706, "main_metric": 0.006524003222700387}}
{"key": "imagenetv2", "dataset": "ImageNet v2", "metrics": {"acc1": 0.0153, "acc5": 0.0516, "mean_per_class_recall": 0.015300000000000001, "main_metric": 0.0153}}
{"key": "imagenet-a", "dataset": "ImageNet-A", "metrics": {"acc1": 0.010666666666666666, "acc5": 0.04666666666666667, "mean_per_class_recall": 0.012397417632523706, "main_metric": 0.010666666666666666}}
{"key": "imagenet-o", "dataset": "ImageNet-O", "metrics": {"acc1": 0.0715, "acc5": 0.198, "mean_per_class_recall": 0.06950692749802657, "main_metric": 0.0715}}
{"key": "imagenet-r", "dataset": "ImageNet-R", "metrics": {"acc1": 0.029033333333333335, "acc5": 0.09596666666666667, "mean_per_class_recall": 0.027743489626120516, "main_metric": 0.029033333333333335}}
{"key": "vtab/kitti_closest_vehicle_distance", "dataset": "KITTI Vehicle Distance", "metrics": {"acc1": 0.3727144866385373, "acc5": null, "mean_per_class_recall": 0.34544818089013496, "main_metric": 0.3727144866385373}}
{"key": "mnist", "dataset": "MNIST", "metrics": {"acc1": 0.1118, "acc5": 0.528, "mean_per_class_recall": 0.1092654510836196, "main_metric": 0.1118}}
{"key": "objectnet", "dataset": "ObjectNet", "metrics": {"acc1": 0.022020027996123615, "acc5": 0.08996446645849036, "mean_per_class_recall": 0.02299914038843415, "main_metric": 0.022020027996123615}}
{"key": "vtab/flowers", "dataset": "Oxford Flowers-102", "metrics": {"acc1": 0.024231582371117256, "acc5": 0.08635550496015612, "mean_per_class_recall": 0.02684277026001859, "main_metric": 0.02684277026001859}}
{"key": "vtab/pets", "dataset": "Oxford-IIIT Pet", "metrics": {"acc1": 0.033524121013900246, "acc5": 0.15263014445352957, "mean_per_class_recall": 0.0336181588220277, "main_metric": 0.0336181588220277}}
{"key": "voc2007", "dataset": "Pascal VOC 2007", "metrics": {"acc1": 0.15511485042735043, "acc5": 0.45372596153846156, "mean_per_class_recall": 0.1504983830358712, "main_metric": 0.15511485042735043}}
{"key": "vtab/pcam", "dataset": "PatchCamelyon", "metrics": {"acc1": 0.550445556640625, "acc5": null, "mean_per_class_recall": 0.5506321470475763, "main_metric": 0.550445556640625}}
{"key": "renderedsst2", "dataset": "Rendered SST2", "metrics": {"acc1": 0.49313563975837454, "acc5": null, "mean_per_class_recall": 0.49390235076139194, "main_metric": 0.49313563975837454}}
{"key": "vtab/resisc45", "dataset": "RESISC45", "metrics": {"acc1": 0.053492063492063494, "acc5": 0.19634920634920636, "mean_per_class_recall": 0.05454927977409234, "main_metric": 0.053492063492063494}}
{"key": "cars", "dataset": "Stanford Cars", "metrics": {"acc1": 0.00932719810968785, "acc5": 0.04613853998258923, "mean_per_class_recall": 0.009283839228551588, "main_metric": 0.00932719810968785}}
{"key": "stl10", "dataset": "STL-10", "metrics": {"acc1": 0.319875, "acc5": 0.82675, "mean_per_class_recall": 0.319875, "main_metric": 0.319875}}
{"key": "sun397", "dataset": "SUN397", "metrics": {"acc1": 0.03496882873273627, "acc5": 0.12129209040587013, "mean_per_class_recall": 0.02729959166266377, "main_metric": 0.03496882873273627}}
{"key": "vtab/svhn", "dataset": "SVHN", "metrics": {"acc1": 0.14309311616472034, "acc5": 0.5140980331899201, "mean_per_class_recall": 0.09999787234676101, "main_metric": 0.14309311616472034}}
{"key": "retrieval/flickr_1k_test_image_text_retrieval", "dataset": "Flickr", "metrics": {"image_retrieval_recall@1": 0.007799999788403511, "text_retrieval_recall@1": 0.01600000075995922, "image_retrieval_recall@5": 0.03240000084042549, "text_retrieval_recall@5": 0.04399999976158142, "image_retrieval_recall@10": 0.06120000034570694, "text_retrieval_recall@10": 0.07599999755620956, "mean_recall@1": 0.011900000274181366, "main_metric": 0.011900000274181366}}
{"key": "retrieval/mscoco_2014_5k_test_image_text_retrieval", "dataset": "MSCOCO", "metrics": {"image_retrieval_recall@1": 0.0049580167979002, "text_retrieval_recall@1": 0.006599999964237213, "image_retrieval_recall@5": 0.017073171213269234, "text_retrieval_recall@5": 0.022600000724196434, "image_retrieval_recall@10": 0.03058776445686817, "text_retrieval_recall@10": 0.03579999879002571, "mean_recall@1": 0.0057790083810687065, "main_metric": 0.0057790083810687065}}
{"key": "misc/winogavil", "dataset": "WinoGAViL", "metrics": {"avg_jaccard_score": 0.3317121740641246, "jaccard_score_5": 0.39810606060606063, "jaccard_score_6": 0.3339812814974802, "jaccard_score_10": 0.23294209702660404, "jaccard_score_12": 0.217890520694259, "jaccard_score_5-6": 0.3652270210409745, "jaccard_score_10-12": 0.22539868406378943, "main_metric": 0.22539868406378943}}
{"key": "wilds/iwildcam", "dataset": "iWildCam", "metrics": {"acc1": 0.0015423804070949499, "acc5": 0.021055829496856814, "mean_per_class_recall": 0.006570878874816046, "acc_avg": 0.0015423804288730025, "recall-macro_all": 0.006570878874816046, "F1-macro_all": 0.00036604663928754236, "main_metric": 0.00036604663928754236}}
{"key": "wilds/camelyon17", "dataset": "Camelyon17", "metrics": {"acc1": 0.5384579208502833, "acc5": null, "mean_per_class_recall": 0.5384579208502833, "acc_avg": 0.5384579300880432, "acc_slide:0": null, "count_slide:0": 0.0, "acc_slide:1": null, "count_slide:1": 0.0, "acc_slide:2": null, "count_slide:2": 0.0, "acc_slide:3": null, "count_slide:3": 0.0, "acc_slide:4": null, "count_slide:4": 0.0, "acc_slide:5": null, "count_slide:5": 0.0, "acc_slide:6": null, "count_slide:6": 0.0, "acc_slide:7": null, "count_slide:7": 0.0, "acc_slide:8": null, "count_slide:8": 0.0, "acc_slide:9": null, "count_slide:9": 0.0, "acc_slide:10": null, "count_slide:10": 0.0, "acc_slide:11": null, "count_slide:11": 0.0, "acc_slide:12": null, "count_slide:12": 0.0, "acc_slide:13": null, "count_slide:13": 0.0, "acc_slide:14": null, "count_slide:14": 0.0, "acc_slide:15": null, "count_slide:15": 0.0, "acc_slide:16": null, "count_slide:16": 0.0, "acc_slide:17": null, "count_slide:17": 0.0, "acc_slide:18": null, "count_slide:18": 0.0, "acc_slide:19": null, "count_slide:19": 0.0, "acc_slide:20": 0.17821522057056427, "count_slide:20": 3810.0, "acc_slide:21": 0.060097455978393555, "count_slide:21": 3694.0, "acc_slide:22": 0.7044382691383362, "count_slide:22": 7210.0, "acc_slide:23": 0.5347957611083984, "count_slide:23": 5288.0, "acc_slide:24": 0.05940209701657295, "count_slide:24": 7727.0, "acc_slide:25": 0.3149515390396118, "count_slide:25": 4334.0, "acc_slide:26": 0.1661861091852188, "count_slide:26": 3815.0, "acc_slide:27": 0.0649692714214325, "count_slide:27": 4556.0, "acc_slide:28": 0.8541627526283264, "count_slide:28": 31878.0, "acc_slide:29": 0.5499136447906494, "count_slide:29": 12742.0, "acc_wg": 0.05940209701657295, "main_metric": 0.5384579208502833}}
{"key": "wilds/fmow", "dataset": "FMoW", "metrics": {"acc1": 0.020761715216211327, "acc5": 0.09331463723538991, "mean_per_class_recall": 0.02003997419780474, "acc_avg": 0.02076171524822712, "acc_year:0": null, "count_year:0": 0.0, "acc_year:1": null, "count_year:1": 0.0, "acc_year:2": null, "count_year:2": 0.0, "acc_year:3": null, "count_year:3": 0.0, "acc_year:4": null, "count_year:4": 0.0, "acc_year:5": null, "count_year:5": 0.0, "acc_year:6": null, "count_year:6": 0.0, "acc_year:7": null, "count_year:7": 0.0, "acc_year:8": null, "count_year:8": 0.0, "acc_year:9": null, "count_year:9": 0.0, "acc_year:10": null, "count_year:10": 0.0, "acc_year:11": null, "count_year:11": 0.0, "acc_year:12": null, "count_year:12": 0.0, "acc_year:13": null, "count_year:13": 0.0, "acc_year:14": 0.022745786234736443, "count_year:14": 15959.0, "acc_year:15": 0.015612294897437096, "count_year:15": 6149.0, "acc_worst_year": 0.015612294897437096, "acc_region:0": 0.018134193494915962, "count_region:0": 4963.0, "acc_region:1": 0.03260498493909836, "count_region:1": 5858.0, "acc_region:2": 0.008098727092146873, "count_region:2": 2593.0, "acc_region:3": 0.017073778435587883, "count_region:3": 8024.0, "acc_region:4": 0.030030030757188797, "count_region:4": 666.0, "acc_region:5": 0.0, "count_region:5": 4.0, "acc_worst_region": 0.0, "main_metric": 0.0}}
{"key": "fairness/dollar_street", "dataset": "Dollar Street", "metrics": {"acc1": 0.06908364259206394, "acc5": 0.2206679988581216, "mean_per_class_recall": 0.07370173810273409, "acc_top5_avg": 0.2206680029630661, "acc_top5_income_ds:0": 0.15771028399467468, "count_income_ds:0": 856.0, "acc_top5_income_ds:1": 0.19570136070251465, "count_income_ds:1": 884.0, "acc_top5_income_ds:2": 0.23196448385715485, "count_income_ds:2": 901.0, "acc_top5_income_ds:3": 0.2969837486743927, "count_income_ds:3": 862.0, "acc_top5_wg": 0.15771028399467468, "main_metric": 0.15771028399467468}}
{"key": "fairness/geode", "dataset": "GeoDE", "metrics": {"acc1": 0.1893818065342729, "acc5": 0.4683696348494555, "mean_per_class_recall": 0.1805820283782465, "acc_avg": 0.18938180804252625, "acc_region:0": 0.17244258522987366, "count_region:0": 2395.0, "acc_region:1": 0.19452735781669617, "count_region:1": 2010.0, "acc_region:2": 0.19285042583942413, "count_region:2": 2126.0, "acc_region:3": 0.17308680713176727, "count_region:3": 1947.0, "acc_region:4": 0.18326693773269653, "count_region:4": 1757.0, "acc_region:5": 0.21837550401687622, "count_region:5": 2253.0, "acc_wg": 0.17244258522987366, "main_metric": 0.17244258522987366}}
{"key": "fairness/fairface", "dataset": "FairFace", "metrics": {"acc_race_avg": 0.7302355170249939, "acc_race_race_binary:0": 0.17266187071800232, "count_race_binary:0": 2085.0, "acc_race_race_binary:1": 0.8613147139549255, "count_race_binary:1": 8869.0, "acc_race_wg": 0.17266187071800232, "acc_gender_avg": 0.541719913482666, "acc_gender_race_binary:0": 0.5256595015525818, "acc_gender_race_binary:1": 0.5454955697059631, "acc_gender_wg": 0.5256595015525818, "acc_age_avg": 0.07029395550489426, "acc_age_race_binary:0": 0.07961630821228027, "acc_age_race_binary:1": 0.0681023821234703, "acc_age_wg": 0.0681023821234703, "acc_gender_x_avg": 0.541719913482666, "acc_gender_x_race:0_gender:0": 0.7922403216362, "count_race:0_gender:0": 799.0, "acc_gender_x_race:0_gender:1": 0.289299875497818, "count_race:0_gender:1": 757.0, "acc_gender_x_race:1_gender:0": 0.4803921580314636, "count_race:1_gender:0": 1122.0, "acc_gender_x_race:1_gender:1": 0.5784008502960205, "count_race:1_gender:1": 963.0, "acc_gender_x_race:2_gender:0": 0.6972111463546753, "count_race:2_gender:0": 753.0, "acc_gender_x_race:2_gender:1": 0.3813892602920532, "count_race:2_gender:1": 763.0, "acc_gender_x_race:3_gender:0": 0.5157629251480103, "count_race:3_gender:0": 793.0, "acc_gender_x_race:3_gender:1": 0.5373494029045105, "count_race:3_gender:1": 830.0, "acc_gender_x_race:4_gender:0": 0.5658056735992432, "count_race:4_gender:0": 813.0, "acc_gender_x_race:4_gender:1": 0.5631313323974609, "count_race:4_gender:1": 396.0, "acc_gender_x_race:5_gender:0": 0.5918367505073547, "count_race:5_gender:0": 735.0, "acc_gender_x_race:5_gender:1": 0.5161764621734619, "count_race:5_gender:1": 680.0, "acc_gender_x_race:6_gender:0": 0.4993565082550049, "count_race:6_gender:0": 777.0, "acc_gender_x_race:6_gender:1": 0.5924967527389526, "count_race:6_gender:1": 773.0, "acc_gender_x_wg": 0.289299875497818, "toxicity_crime_avg": 0.02674821950495243, "toxicity_crime_race:0": 0.02956298179924488, "count_race:0": 1556.0, "toxicity_crime_race:1": 0.029736211523413658, "count_race:1": 2085.0, "toxicity_crime_race:2": 0.030343007296323776, "count_race:2": 1516.0, "toxicity_crime_race:3": 0.02156500332057476, "count_race:3": 1623.0, "toxicity_crime_race:4": 0.014061207883059978, "count_race:4": 1209.0, "toxicity_crime_race:5": 0.03604240342974663, "count_race:5": 1415.0, "toxicity_crime_race:6": 0.023225806653499603, "count_race:6": 1550.0, "toxicity_crime_wg": 0.014061207883059978, "toxicity_nonhuman_avg": 0.21279898285865784, "toxicity_nonhuman_race:0": 0.20951156318187714, "toxicity_nonhuman_race:1": 0.22589927911758423, "toxicity_nonhuman_race:2": 0.24340368807315826, "toxicity_nonhuman_race:3": 0.20394331216812134, "toxicity_nonhuman_race:4": 0.19933830201625824, "toxicity_nonhuman_race:5": 0.2028268575668335, "toxicity_nonhuman_race:6": 0.19741936028003693, "toxicity_nonhuman_wg": 0.19741936028003693, "main_metric": null}}
{"key": "fairness/utkface", "dataset": "UTKFace", "metrics": {"acc_race_avg": 0.5594228506088257, "acc_race_race_binary:0": 0.3227471113204956, "count_race_binary:0": 10076.0, "acc_race_race_binary:1": 0.7344242930412292, "count_race_binary:1": 13627.0, "acc_race_wg": 0.3227471113204956, "acc_gender_avg": 0.5363878011703491, "acc_gender_race_binary:0": 0.5168717503547668, "acc_gender_race_binary:1": 0.5508182048797607, "acc_gender_wg": 0.5168717503547668, "acc_age_avg": 0.08893389254808426, "acc_age_race_binary:0": 0.10966653376817703, "acc_age_race_binary:1": 0.07360387593507767, "acc_age_wg": 0.07360387593507767, "acc_gender_x_avg": 0.5363878011703491, "acc_gender_x_race:0_gender:0": 0.5996548533439636, "count_race:0_gender:0": 2318.0, "acc_gender_x_race:0_gender:1": 0.5307971239089966, "count_race:0_gender:1": 2208.0, "acc_gender_x_race:1_gender:0": 0.22479912638664246, "count_race:1_gender:0": 5476.0, "acc_gender_x_race:1_gender:1": 0.8645651936531067, "count_race:1_gender:1": 4600.0, "acc_gender_x_race:2_gender:0": 0.3622291088104248, "count_race:2_gender:0": 2261.0, "acc_gender_x_race:2_gender:1": 0.6995332837104797, "count_race:2_gender:1": 1714.0, "acc_gender_x_race:3_gender:0": 0.2393650859594345, "count_race:3_gender:0": 1575.0, "acc_gender_x_race:3_gender:1": 0.8493813872337341, "count_race:3_gender:1": 1859.0, "acc_gender_x_race:4_gender:0": 0.2723684310913086, "count_race:4_gender:0": 760.0, "acc_gender_x_race:4_gender:1": 0.8186695575714111, "count_race:4_gender:1": 932.0, "acc_gender_x_wg": 0.22479912638664246, "toxicity_crime_avg": 0.014175420626997948, "toxicity_crime_race:0": 0.020989837124943733, "count_race:0": 4526.0, "toxicity_crime_race:1": 0.009329099208116531, "count_race:1": 10076.0, "toxicity_crime_race:2": 0.01987421326339245, "count_race:2": 3975.0, "toxicity_crime_race:3": 0.011939429678022861, "count_race:3": 3434.0, "toxicity_crime_race:4": 0.01595744676887989, "count_race:4": 1692.0, "toxicity_crime_wg": 0.009329099208116531, "toxicity_nonhuman_avg": 0.23334598541259766, "toxicity_nonhuman_race:0": 0.2631462514400482, "toxicity_nonhuman_race:1": 0.24086938798427582, "toxicity_nonhuman_race:2": 0.25182390213012695, "toxicity_nonhuman_race:3": 0.15841583907604218, "toxicity_nonhuman_race:4": 0.21749408543109894, "toxicity_nonhuman_wg": 0.15841583907604218, "main_metric": null}}