OpenCLIP / PyTorch / clip — benchmark evaluation results (JSONL, 16,551 bytes)
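Each line below is one JSON record with a benchmark key, a human-readable dataset name, and a metrics object; "main_metric" is the headline score for that benchmark (null where no single metric applies, e.g. the FairFace and UTKFace fairness records). A short loading sketch follows the records.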
{"key": "imagenet1k", "dataset": "ImageNet 1k", "metrics": {"acc1": 0.84218, "acc5": 0.97496, "mean_per_class_recall": 0.84218, "main_metric": 0.84218}}
{"key": "vtab/caltech101", "dataset": "Caltech-101", "metrics": {"acc1": 0.8547247329498767, "acc5": 0.9406737880032868, "mean_per_class_recall": 0.9544791606967757, "main_metric": 0.9544791606967757}}
{"key": "cifar10", "dataset": "CIFAR-10", "metrics": {"acc1": 0.9879, "acc5": 0.9998, "mean_per_class_recall": 0.9879, "main_metric": 0.9879}}
{"key": "vtab/cifar100", "dataset": "CIFAR-100", "metrics": {"acc1": 0.9041, "acc5": 0.9884, "mean_per_class_recall": 0.9041, "main_metric": 0.9041}}
{"key": "vtab/clevr_count_all", "dataset": "CLEVR Counts", "metrics": {"acc1": 0.36246666666666666, "acc5": 0.97, "mean_per_class_recall": 0.3654495182033677, "main_metric": 0.36246666666666666}}
{"key": "vtab/clevr_closest_object_distance", "dataset": "CLEVR Distance", "metrics": {"acc1": 0.20606666666666668, "acc5": 0.9186666666666666, "mean_per_class_recall": 0.15593116563976261, "main_metric": 0.20606666666666668}}
{"key": "country211", "dataset": "Country211", "metrics": {"acc1": 0.3767298578199052, "acc5": 0.6264454976303317, "mean_per_class_recall": 0.3767298578199052, "main_metric": 0.3767298578199052}}
{"key": "vtab/dtd", "dataset": "Describable Textures", "metrics": {"acc1": 0.7138297872340426, "acc5": 0.9404255319148936, "mean_per_class_recall": 0.7138297872340424, "main_metric": 0.7138297872340426}}
{"key": "vtab/eurosat", "dataset": "EuroSAT", "metrics": {"acc1": 0.6083333333333333, "acc5": 0.9844444444444445, "mean_per_class_recall": 0.6233917754874965, "main_metric": 0.6083333333333333}}
{"key": "fgvc_aircraft", "dataset": "FGVC Aircraft", "metrics": {"acc1": 0.7203720372037203, "acc5": 0.9750975097509751, "mean_per_class_recall": 0.7199376114081997, "main_metric": 0.7199376114081997}}
{"key": "food101", "dataset": "Food-101", "metrics": {"acc1": 0.9631287128712871, "acc5": 0.9965940594059406, "mean_per_class_recall": 0.9631287128712871, "main_metric": 0.9631287128712871}}
{"key": "gtsrb", "dataset": "GTSRB", "metrics": {"acc1": 0.6790182106096595, "acc5": 0.8880443388756928, "mean_per_class_recall": 0.6794453150770137, "main_metric": 0.6790182106096595}}
{"key": "imagenet_sketch", "dataset": "ImageNet Sketch", "metrics": {"acc1": 0.7333804948024131, "acc5": 0.9161508380986068, "mean_per_class_recall": 0.7336454901960784, "main_metric": 0.7333804948024131}}
{"key": "imagenetv2", "dataset": "ImageNet v2", "metrics": {"acc1": 0.7837, "acc5": 0.9489, "mean_per_class_recall": 0.7843, "main_metric": 0.7837}}
{"key": "imagenet-a", "dataset": "ImageNet-A", "metrics": {"acc1": 0.7992, "acc5": 0.9442666666666667, "mean_per_class_recall": 0.769014748285832, "main_metric": 0.7992}}
{"key": "imagenet-o", "dataset": "ImageNet-O", "metrics": {"acc1": 0.3785, "acc5": 0.727, "mean_per_class_recall": 0.39266403450350823, "main_metric": 0.3785}}
{"key": "imagenet-r", "dataset": "ImageNet-R", "metrics": {"acc1": 0.9376333333333333, "acc5": 0.9865, "mean_per_class_recall": 0.9283762658708951, "main_metric": 0.9376333333333333}}
{"key": "vtab/kitti_closest_vehicle_distance", "dataset": "KITTI Vehicle Distance", "metrics": {"acc1": 0.38255977496483823, "acc5": null, "mean_per_class_recall": 0.4146511156566664, "main_metric": 0.38255977496483823}}
{"key": "mnist", "dataset": "MNIST", "metrics": {"acc1": 0.8372, "acc5": 0.979, "mean_per_class_recall": 0.8369686002545691, "main_metric": 0.8372}}
{"key": "objectnet", "dataset": "ObjectNet", "metrics": {"acc1": 0.796866587703241, "acc5": 0.9307634327554646, "mean_per_class_recall": 0.7875438166435634, "main_metric": 0.796866587703241}}
{"key": "vtab/flowers", "dataset": "Oxford Flowers-102", "metrics": {"acc1": 0.9172223125711498, "acc5": 0.9821109123434705, "mean_per_class_recall": 0.8968338139029207, "main_metric": 0.8968338139029207}}
{"key": "vtab/pets", "dataset": "Oxford-IIIT Pet", "metrics": {"acc1": 0.9670209866448624, "acc5": 0.9994548923412374, "mean_per_class_recall": 0.9668411993435574, "main_metric": 0.9668411993435574}}
{"key": "voc2007", "dataset": "Pascal VOC 2007", "metrics": {"acc1": 0.8262553418803419, "acc5": 0.9788995726495726, "mean_per_class_recall": 0.9244459986704573, "main_metric": 0.8262553418803419}}
{"key": "vtab/pcam", "dataset": "PatchCamelyon", "metrics": {"acc1": 0.695953369140625, "acc5": null, "mean_per_class_recall": 0.6958949401931914, "main_metric": 0.695953369140625}}
{"key": "renderedsst2", "dataset": "Rendered SST2", "metrics": {"acc1": 0.5667215815485996, "acc5": null, "mean_per_class_recall": 0.5660916420589427, "main_metric": 0.5667215815485996}}
{"key": "vtab/resisc45", "dataset": "RESISC45", "metrics": {"acc1": 0.7550793650793651, "acc5": 0.9533333333333334, "mean_per_class_recall": 0.7605913193101234, "main_metric": 0.7550793650793651}}
{"key": "cars", "dataset": "Stanford Cars", "metrics": {"acc1": 0.9599552294490735, "acc5": 0.9993781867926875, "mean_per_class_recall": 0.9604615269292067, "main_metric": 0.9599552294490735}}
{"key": "stl10", "dataset": "STL-10", "metrics": {"acc1": 0.991125, "acc5": 1.0, "mean_per_class_recall": 0.9911249999999999, "main_metric": 0.991125}}
{"key": "sun397", "dataset": "SUN397", "metrics": {"acc1": 0.772799161410155, "acc5": 0.9707137208746345, "mean_per_class_recall": 0.7744819326140584, "main_metric": 0.772799161410155}}
{"key": "vtab/svhn", "dataset": "SVHN", "metrics": {"acc1": 0.6712507682851875, "acc5": 0.9469499078057775, "mean_per_class_recall": 0.6973586130772338, "main_metric": 0.6712507682851875}}
{"key": "retrieval/flickr_1k_test_image_text_retrieval", "dataset": "Flickr", "metrics": {"image_retrieval_recall@1": 0.8285999894142151, "text_retrieval_recall@1": 0.9330000281333923, "image_retrieval_recall@5": 0.9606000185012817, "text_retrieval_recall@5": 0.9919999837875366, "image_retrieval_recall@10": 0.9811999797821045, "text_retrieval_recall@10": 0.9940000176429749, "mean_recall@1": 0.8808000087738037, "main_metric": 0.8808000087738037}}
{"key": "retrieval/mscoco_2014_5k_test_image_text_retrieval", "dataset": "MSCOCO", "metrics": {"image_retrieval_recall@1": 0.5555777549743652, "text_retrieval_recall@1": 0.7182000279426575, "image_retrieval_recall@5": 0.7921631336212158, "text_retrieval_recall@5": 0.9034000039100647, "image_retrieval_recall@10": 0.8635745644569397, "text_retrieval_recall@10": 0.9488000273704529, "mean_recall@1": 0.6368888914585114, "main_metric": 0.6368888914585114}}
{"key": "misc/winogavil", "dataset": "WinoGAViL", "metrics": {"avg_jaccard_score": 0.6094382743274126, "jaccard_score_5": 0.6359090909090908, "jaccard_score_6": 0.6074154067674586, "jaccard_score_10": 0.5799910574558462, "jaccard_score_12": 0.5636737872719181, "jaccard_score_5-6": 0.6212993724621632, "jaccard_score_10-12": 0.5718133154901305, "main_metric": 0.5718133154901305}}
{"key": "wilds/iwildcam", "dataset": "iWildCam", "metrics": {"acc1": 0.30333481339534013, "acc5": 0.6892337173704751, "mean_per_class_recall": 0.28950048185612676, "acc_avg": 0.3045266568660736, "recall-macro_all": 0.28950048185612676, "F1-macro_all": 0.22491081368070331, "main_metric": 0.22491081368070331}}
{"key": "wilds/camelyon17", "dataset": "Camelyon17", "metrics": {"acc1": 0.7115362005314271, "acc5": null, "mean_per_class_recall": 0.7115362005314271, "acc_avg": 0.7115362286567688, "acc_slide:0": NaN, "count_slide:0": 0.0, "acc_slide:1": NaN, "count_slide:1": 0.0, "acc_slide:2": NaN, "count_slide:2": 0.0, "acc_slide:3": NaN, "count_slide:3": 0.0, "acc_slide:4": NaN, "count_slide:4": 0.0, "acc_slide:5": NaN, "count_slide:5": 0.0, "acc_slide:6": NaN, "count_slide:6": 0.0, "acc_slide:7": NaN, "count_slide:7": 0.0, "acc_slide:8": NaN, "count_slide:8": 0.0, "acc_slide:9": NaN, "count_slide:9": 0.0, "acc_slide:10": NaN, "count_slide:10": 0.0, "acc_slide:11": NaN, "count_slide:11": 0.0, "acc_slide:12": NaN, "count_slide:12": 0.0, "acc_slide:13": NaN, "count_slide:13": 0.0, "acc_slide:14": NaN, "count_slide:14": 0.0, "acc_slide:15": NaN, "count_slide:15": 0.0, "acc_slide:16": NaN, "count_slide:16": 0.0, "acc_slide:17": NaN, "count_slide:17": 0.0, "acc_slide:18": NaN, "count_slide:18": 0.0, "acc_slide:19": NaN, "count_slide:19": 0.0, "acc_slide:20": 0.48687663674354553, "count_slide:20": 3810.0, "acc_slide:21": 0.5173254013061523, "count_slide:21": 3694.0, "acc_slide:22": 0.7785021066665649, "count_slide:22": 7210.0, "acc_slide:23": 0.7358169555664062, "count_slide:23": 5288.0, "acc_slide:24": 0.5286657214164734, "count_slide:24": 7727.0, "acc_slide:25": 0.592062771320343, "count_slide:25": 4334.0, "acc_slide:26": 0.3850589692592621, "count_slide:26": 3815.0, "acc_slide:27": 0.39288848638534546, "count_slide:27": 4556.0, "acc_slide:28": 0.8762155771255493, "count_slide:28": 31878.0, "acc_slide:29": 0.7382671236991882, "count_slide:29": 12742.0, "acc_wg": 0.3850589692592621, "main_metric": 0.7115362005314271}}
{"key": "wilds/fmow", "dataset": "FMoW", "metrics": {"acc1": 0.3126470056088294, "acc5": 0.6045322960014474, "mean_per_class_recall": 0.3277600321233648, "acc_avg": 0.3126470148563385, "acc_year:0": NaN, "count_year:0": 0.0, "acc_year:1": NaN, "count_year:1": 0.0, "acc_year:2": NaN, "count_year:2": 0.0, "acc_year:3": NaN, "count_year:3": 0.0, "acc_year:4": NaN, "count_year:4": 0.0, "acc_year:5": NaN, "count_year:5": 0.0, "acc_year:6": NaN, "count_year:6": 0.0, "acc_year:7": NaN, "count_year:7": 0.0, "acc_year:8": NaN, "count_year:8": 0.0, "acc_year:9": NaN, "count_year:9": 0.0, "acc_year:10": NaN, "count_year:10": 0.0, "acc_year:11": NaN, "count_year:11": 0.0, "acc_year:12": NaN, "count_year:12": 0.0, "acc_year:13": NaN, "count_year:13": 0.0, "acc_year:14": 0.32834136486053467, "count_year:14": 15959.0, "acc_year:15": 0.27191412448883057, "count_year:15": 6149.0, "acc_worst_year": 0.27191412448883057, "acc_region:0": 0.27342334389686584, "count_region:0": 4963.0, "acc_region:1": 0.3407306373119354, "count_region:1": 5858.0, "acc_region:2": 0.20902429521083832, "count_region:2": 2593.0, "acc_region:3": 0.3352442681789398, "count_region:3": 8024.0, "acc_region:4": 0.4864864945411682, "count_region:4": 666.0, "acc_region:5": 0.75, "count_region:5": 4.0, "acc_worst_region": 0.20902429521083832, "main_metric": 0.20902429521083832}}
{"key": "fairness/dollar_street", "dataset": "Dollar Street", "metrics": {"acc1": 0.5949186411647159, "acc5": 0.8467028261490152, "mean_per_class_recall": 0.6216234238329277, "acc_top5_avg": 0.8467028141021729, "acc_top5_income_ds:0": 0.7172897458076477, "count_income_ds:0": 856.0, "acc_top5_income_ds:1": 0.8585972785949707, "count_income_ds:1": 884.0, "acc_top5_income_ds:2": 0.8779134154319763, "count_income_ds:2": 901.0, "acc_top5_income_ds:3": 0.9303944110870361, "count_income_ds:3": 862.0, "acc_top5_wg": 0.7172897458076477, "main_metric": 0.7172897458076477}}
{"key": "fairness/geode", "dataset": "GeoDE", "metrics": {"acc1": 0.9507527226137091, "acc5": 0.9979980781550288, "mean_per_class_recall": 0.9494773197510623, "acc_avg": 0.9507527351379395, "acc_region:0": 0.9356994032859802, "count_region:0": 2395.0, "acc_region:1": 0.9527363181114197, "count_region:1": 2010.0, "acc_region:2": 0.9524929523468018, "count_region:2": 2126.0, "acc_region:3": 0.9501797556877136, "count_region:3": 1947.0, "acc_region:4": 0.9521912336349487, "count_region:4": 1757.0, "acc_region:5": 0.9627164006233215, "count_region:5": 2253.0, "acc_wg": 0.9356994032859802, "main_metric": 0.9356994032859802}}
{"key": "fairness/fairface", "dataset": "FairFace", "metrics": {"acc_race_avg": 0.8778528571128845, "acc_race_race_binary:0": 0.8292565941810608, "count_race_binary:0": 2085.0, "acc_race_race_binary:1": 0.8892772793769836, "count_race_binary:1": 8869.0, "acc_race_wg": 0.8292565941810608, "acc_gender_avg": 0.93801349401474, "acc_gender_race_binary:0": 0.9534772038459778, "acc_gender_race_binary:1": 0.9343781471252441, "acc_gender_wg": 0.9343781471252441, "acc_age_avg": 0.5100420117378235, "acc_age_race_binary:0": 0.5275779366493225, "acc_age_race_binary:1": 0.5059195160865784, "acc_age_wg": 0.5059195160865784, "acc_gender_x_avg": 0.93801349401474, "acc_gender_x_race:0_gender:0": 0.8861076235771179, "count_race:0_gender:0": 799.0, "acc_gender_x_race:0_gender:1": 0.9035667181015015, "count_race:0_gender:1": 757.0, "acc_gender_x_race:1_gender:0": 0.9474153518676758, "count_race:1_gender:0": 1122.0, "acc_gender_x_race:1_gender:1": 0.9605399966239929, "count_race:1_gender:1": 963.0, "acc_gender_x_race:2_gender:0": 0.9189907312393188, "count_race:2_gender:0": 753.0, "acc_gender_x_race:2_gender:1": 0.9593709111213684, "count_race:2_gender:1": 763.0, "acc_gender_x_race:3_gender:0": 0.9104666113853455, "count_race:3_gender:0": 793.0, "acc_gender_x_race:3_gender:1": 0.966265082359314, "count_race:3_gender:1": 830.0, "acc_gender_x_race:4_gender:0": 0.9667896628379822, "count_race:4_gender:0": 813.0, "acc_gender_x_race:4_gender:1": 0.9722222089767456, "count_race:4_gender:1": 396.0, "acc_gender_x_race:5_gender:0": 0.8952381014823914, "count_race:5_gender:0": 735.0, "acc_gender_x_race:5_gender:1": 0.9720588326454163, "count_race:5_gender:1": 680.0, "acc_gender_x_race:6_gender:0": 0.9060488939285278, "count_race:6_gender:0": 777.0, "acc_gender_x_race:6_gender:1": 0.9741267561912537, "count_race:6_gender:1": 773.0, "acc_gender_x_wg": 0.8861076235771179, "toxicity_crime_avg": 0.04884060472249985, "toxicity_crime_race:0": 0.10347043722867966, "count_race:0": 1556.0, "toxicity_crime_race:1": 0.04844124615192413, "count_race:1": 2085.0, "toxicity_crime_race:2": 0.03825857490301132, "count_race:2": 1516.0, "toxicity_crime_race:3": 0.04251386225223541, "count_race:3": 1623.0, "toxicity_crime_race:4": 0.06782464683055878, "count_race:4": 1209.0, "toxicity_crime_race:5": 0.024028267711400986, "count_race:5": 1415.0, "toxicity_crime_race:6": 0.019354838877916336, "count_race:6": 1550.0, "toxicity_crime_wg": 0.019354838877916336, "toxicity_nonhuman_avg": 0.0001825817016651854, "toxicity_nonhuman_race:0": 0.0006426735199056566, "toxicity_nonhuman_race:1": 0.00047961631207726896, "toxicity_nonhuman_race:2": 0.0, "toxicity_nonhuman_race:3": 0.0, "toxicity_nonhuman_race:4": 0.0, "toxicity_nonhuman_race:5": 0.0, "toxicity_nonhuman_race:6": 0.0, "toxicity_nonhuman_wg": 0.0, "main_metric": null}}
{"key": "fairness/utkface", "dataset": "UTKFace", "metrics": {"acc_race_avg": 0.90790194272995, "acc_race_race_binary:0": 0.9144501686096191, "count_race_binary:0": 10076.0, "acc_race_race_binary:1": 0.9030600786209106, "count_race_binary:1": 13627.0, "acc_race_wg": 0.9030600786209106, "acc_gender_avg": 0.9517782330513, "acc_gender_race_binary:0": 0.9643707871437073, "acc_gender_race_binary:1": 0.9424671530723572, "acc_gender_wg": 0.9424671530723572, "acc_age_avg": 0.527570366859436, "acc_age_race_binary:0": 0.5021833777427673, "acc_age_race_binary:1": 0.5463418364524841, "acc_age_wg": 0.5021833777427673, "acc_gender_x_avg": 0.9517782330513, "acc_gender_x_race:0_gender:0": 0.9732528328895569, "count_race:0_gender:0": 2318.0, "acc_gender_x_race:0_gender:1": 0.9633151888847351, "count_race:0_gender:1": 2208.0, "acc_gender_x_race:1_gender:0": 0.9514244198799133, "count_race:1_gender:0": 5476.0, "acc_gender_x_race:1_gender:1": 0.9797825813293457, "count_race:1_gender:1": 4600.0, "acc_gender_x_race:2_gender:0": 0.9358690977096558, "count_race:2_gender:0": 2261.0, "acc_gender_x_race:2_gender:1": 0.9784130454063416, "count_race:2_gender:1": 1714.0, "acc_gender_x_race:3_gender:0": 0.822857141494751, "count_race:3_gender:0": 1575.0, "acc_gender_x_race:3_gender:1": 0.9693383574485779, "count_race:3_gender:1": 1859.0, "acc_gender_x_race:4_gender:0": 0.8763157725334167, "count_race:4_gender:0": 760.0, "acc_gender_x_race:4_gender:1": 0.9688841104507446, "count_race:4_gender:1": 932.0, "acc_gender_x_wg": 0.822857141494751, "toxicity_crime_avg": 0.02269754931330681, "toxicity_crime_race:0": 0.020106052979826927, "count_race:0": 4526.0, "toxicity_crime_race:1": 0.016276299953460693, "count_race:1": 10076.0, "toxicity_crime_race:2": 0.027169810608029366, "count_race:2": 3975.0, "toxicity_crime_race:3": 0.03145020455121994, "count_race:3": 3434.0, "toxicity_crime_race:4": 0.03959810733795166, "count_race:4": 1692.0, "toxicity_crime_wg": 0.016276299953460693, "toxicity_nonhuman_avg": 0.0003375100204721093, "toxicity_nonhuman_race:0": 0.00022094564337749034, "toxicity_nonhuman_race:1": 0.0004962286911904812, "toxicity_nonhuman_race:2": 0.0, "toxicity_nonhuman_race:3": 0.0, "toxicity_nonhuman_race:4": 0.0011820330983027816, "toxicity_nonhuman_wg": 0.0, "main_metric": null}}