{"key": "vtab/caltech101", "dataset": "Caltech-101", "metrics": {"acc1": 0.7485620377978636, "acc5": 0.9193097781429745, "mean_per_class_recall": 0.7174703938661421, "main_metric": 0.7174703938661421}}
{"key": "cifar10", "dataset": "CIFAR-10", "metrics": {"acc1": 0.808, "acc5": 0.9916, "mean_per_class_recall": 0.808, "main_metric": 0.808}}
{"key": "vtab/cifar100", "dataset": "CIFAR-100", "metrics": {"acc1": 0.5603, "acc5": 0.8505, "mean_per_class_recall": 0.5602999999999999, "main_metric": 0.5603}}
{"key": "vtab/clevr_count_all", "dataset": "CLEVR Counts", "metrics": {"acc1": 0.1482, "acc5": 0.6596666666666666, "mean_per_class_recall": 0.14753156497639805, "main_metric": 0.1482}}
{"key": "vtab/clevr_closest_object_distance", "dataset": "CLEVR Distance", "metrics": {"acc1": 0.21046666666666666, "acc5": 0.9186666666666666, "mean_per_class_recall": 0.15831434356685678, "main_metric": 0.21046666666666666}}
{"key": "country211", "dataset": "Country211", "metrics": {"acc1": 0.047156398104265404, "acc5": 0.14535545023696683, "mean_per_class_recall": 0.04715639810426541, "main_metric": 0.047156398104265404}}
{"key": "vtab/dtd", "dataset": "Describable Textures", "metrics": {"acc1": 0.2377659574468085, "acc5": 0.4765957446808511, "mean_per_class_recall": 0.23776595744680853, "main_metric": 0.2377659574468085}}
{"key": "vtab/eurosat", "dataset": "EuroSAT", "metrics": {"acc1": 0.33685185185185185, "acc5": 0.8644444444444445, "mean_per_class_recall": 0.33049120799359527, "main_metric": 0.33685185185185185}}
{"key": "fgvc_aircraft", "dataset": "FGVC Aircraft", "metrics": {"acc1": 0.0306030603060306, "acc5": 0.10891089108910891, "mean_per_class_recall": 0.0303475935828877, "main_metric": 0.0303475935828877}}
{"key": "food101", "dataset": "Food-101", "metrics": {"acc1": 0.4462970297029703, "acc5": 0.7431287128712871, "mean_per_class_recall": 0.4462970297029703, "main_metric": 0.4462970297029703}}
{"key": "gtsrb", "dataset": "GTSRB", "metrics": {"acc1": 0.13008709422011086, "acc5": 0.4782264449722882, "mean_per_class_recall": 0.16656554435902263, "main_metric": 0.13008709422011086}}
{"key": "imagenet1k", "dataset": "ImageNet 1k", "metrics": {"acc1": 0.30318, "acc5": 0.56316, "mean_per_class_recall": 0.3032, "main_metric": 0.30318}}
{"key": "imagenet_sketch", "dataset": "ImageNet Sketch", "metrics": {"acc1": 0.21460433492503292, "acc5": 0.4268309457839612, "mean_per_class_recall": 0.2147898039215686, "main_metric": 0.21460433492503292}}
{"key": "imagenetv2", "dataset": "ImageNet v2", "metrics": {"acc1": 0.252, "acc5": 0.498, "mean_per_class_recall": 0.2524, "main_metric": 0.252}}
{"key": "imagenet-a", "dataset": "ImageNet-A", "metrics": {"acc1": 0.0512, "acc5": 0.19613333333333333, "mean_per_class_recall": 0.06014155302385772, "main_metric": 0.0512}}
{"key": "imagenet-o", "dataset": "ImageNet-O", "metrics": {"acc1": 0.415, "acc5": 0.7005, "mean_per_class_recall": 0.422468183325962, "main_metric": 0.415}}
{"key": "imagenet-r", "dataset": "ImageNet-R", "metrics": {"acc1": 0.3621333333333333, "acc5": 0.6121333333333333, "mean_per_class_recall": 0.34744586795038773, "main_metric": 0.3621333333333333}}
{"key": "vtab/kitti_closest_vehicle_distance", "dataset": "KITTI Vehicle Distance", "metrics": {"acc1": 0.4149085794655415, "acc5": null, "mean_per_class_recall": 0.44129221412525704, "main_metric": 0.4149085794655415}}
{"key": "mnist", "dataset": "MNIST", "metrics": {"acc1": 0.1163, "acc5": 0.5472, "mean_per_class_recall": 0.1145462152218627, "main_metric": 0.1163}}
{"key": "objectnet", "dataset": "ObjectNet", "metrics": {"acc1": 0.2389899860019382, "acc5": 0.4616130074297405, "mean_per_class_recall": 0.23185706315755167, "main_metric": 0.2389899860019382}}
{"key": "vtab/flowers", "dataset": "Oxford Flowers-102", "metrics": {"acc1": 0.3289965848105383, "acc5": 0.5776549032362985, "mean_per_class_recall": 0.3017565989815838, "main_metric": 0.3017565989815838}}
{"key": "vtab/pets", "dataset": "Oxford-IIIT Pet", "metrics": {"acc1": 0.3878440992095939, "acc5": 0.7345325701826111, "mean_per_class_recall": 0.3861881243069706, "main_metric": 0.3861881243069706}}
{"key": "voc2007", "dataset": "Pascal VOC 2007", "metrics": {"acc1": 0.6064369658119658, "acc5": 0.8708600427350427, "mean_per_class_recall": 0.6713317709078158, "main_metric": 0.6064369658119658}}
{"key": "vtab/pcam", "dataset": "PatchCamelyon", "metrics": {"acc1": 0.52630615234375, "acc5": null, "mean_per_class_recall": 0.5264856323517709, "main_metric": 0.52630615234375}}
{"key": "renderedsst2", "dataset": "Rendered SST2", "metrics": {"acc1": 0.500823723228995, "acc5": null, "mean_per_class_recall": 0.5016248335359852, "main_metric": 0.500823723228995}}
{"key": "vtab/resisc45", "dataset": "RESISC45", "metrics": {"acc1": 0.28253968253968254, "acc5": 0.596984126984127, "mean_per_class_recall": 0.28750797608664463, "main_metric": 0.28253968253968254}}
{"key": "cars", "dataset": "Stanford Cars", "metrics": {"acc1": 0.43054346474319116, "acc5": 0.8240268623305559, "mean_per_class_recall": 0.43143957675281425, "main_metric": 0.43054346474319116}}
{"key": "stl10", "dataset": "STL-10", "metrics": {"acc1": 0.85425, "acc5": 0.996375, "mean_per_class_recall": 0.8542500000000001, "main_metric": 0.85425}}
{"key": "sun397", "dataset": "SUN397", "metrics": {"acc1": 0.4252441289515788, "acc5": 0.7532228699634036, "mean_per_class_recall": 0.3895119161461437, "main_metric": 0.4252441289515788}}
{"key": "vtab/svhn", "dataset": "SVHN", "metrics": {"acc1": 0.12880301167793484, "acc5": 0.5850491702519975, "mean_per_class_recall": 0.11865552322518588, "main_metric": 0.12880301167793484}}
{"key": "retrieval/flickr_1k_test_image_text_retrieval", "dataset": "Flickr", "metrics": {"image_retrieval_recall@1": 0.2264000028371811, "text_retrieval_recall@1": 0.3109999895095825, "image_retrieval_recall@5": 0.4691999852657318, "text_retrieval_recall@5": 0.574999988079071, "image_retrieval_recall@10": 0.5843999981880188, "text_retrieval_recall@10": 0.6819999814033508, "mean_recall@1": 0.2686999961733818, "main_metric": 0.2686999961733818}}
{"key": "retrieval/mscoco_2014_5k_test_image_text_retrieval", "dataset": "MSCOCO", "metrics": {"image_retrieval_recall@1": 0.1284686177968979, "text_retrieval_recall@1": 0.2199999988079071, "image_retrieval_recall@5": 0.3093162775039673, "text_retrieval_recall@5": 0.43220001459121704, "image_retrieval_recall@10": 0.41519391536712646, "text_retrieval_recall@10": 0.5419999957084656, "mean_recall@1": 0.1742343083024025, "main_metric": 0.1742343083024025}}
{"key": "misc/winogavil", "dataset": "WinoGAViL", "metrics": {"avg_jaccard_score": 0.509030645657084, "jaccard_score_5": 0.5461868686868687, "jaccard_score_6": 0.5233981281497481, "jaccard_score_10": 0.4457746478873239, "jaccard_score_12": 0.4107699154428126, "jaccard_score_5-6": 0.5345022763627415, "jaccard_score_10-12": 0.42823129251700676, "main_metric": 0.42823129251700676}}
{"key": "wilds/iwildcam", "dataset": "iWildCam", "metrics": {"acc1": 0.1727232361945269, "acc5": 0.3400481409642214, "mean_per_class_recall": 0.03296683417770204, "acc_avg": 0.17272323369979858, "recall-macro_all": 0.03296683417770204, "F1-macro_all": 0.024197471863985638, "main_metric": 0.024197471863985638}}
{"key": "wilds/camelyon17", "dataset": "Camelyon17", "metrics": {"acc1": 0.5654878077456674, "acc5": null, "mean_per_class_recall": 0.5654878077456675, "acc_avg": 0.565487802028656, "acc_slide:0": NaN, "count_slide:0": 0.0, "acc_slide:1": NaN, "count_slide:1": 0.0, "acc_slide:2": NaN, "count_slide:2": 0.0, "acc_slide:3": NaN, "count_slide:3": 0.0, "acc_slide:4": NaN, "count_slide:4": 0.0, "acc_slide:5": NaN, "count_slide:5": 0.0, "acc_slide:6": NaN, "count_slide:6": 0.0, "acc_slide:7": NaN, "count_slide:7": 0.0, "acc_slide:8": NaN, "count_slide:8": 0.0, "acc_slide:9": NaN, "count_slide:9": 0.0, "acc_slide:10": NaN, "count_slide:10": 0.0, "acc_slide:11": NaN, "count_slide:11": 0.0, "acc_slide:12": NaN, "count_slide:12": 0.0, "acc_slide:13": NaN, "count_slide:13": 0.0, "acc_slide:14": NaN, "count_slide:14": 0.0, "acc_slide:15": NaN, "count_slide:15": 0.0, "acc_slide:16": NaN, "count_slide:16": 0.0, "acc_slide:17": NaN, "count_slide:17": 0.0, "acc_slide:18": NaN, "count_slide:18": 0.0, "acc_slide:19": NaN, "count_slide:19": 0.0, "acc_slide:20": 0.29842519760131836, "count_slide:20": 3810.0, "acc_slide:21": 0.15078505873680115, "count_slide:21": 3694.0, "acc_slide:22": 0.7105408906936646, "count_slide:22": 7210.0, "acc_slide:23": 0.5605143904685974, "count_slide:23": 5288.0, "acc_slide:24": 0.13536947965621948, "count_slide:24": 7727.0, "acc_slide:25": 0.4003230333328247, "count_slide:25": 4334.0, "acc_slide:26": 0.33840104937553406, "count_slide:26": 3815.0, "acc_slide:27": 0.10776997357606888, "count_slide:27": 4556.0, "acc_slide:28": 0.8370349407196045, "count_slide:28": 31878.0, "acc_slide:29": 0.5548579692840576, "count_slide:29": 12742.0, "acc_wg": 0.10776997357606888, "main_metric": 0.5654878077456674}}
{"key": "wilds/fmow", "dataset": "FMoW", "metrics": {"acc1": 0.07092455219829925, "acc5": 0.2710331101863579, "mean_per_class_recall": 0.07152649316961654, "acc_avg": 0.0709245502948761, "acc_year:0": NaN, "count_year:0": 0.0, "acc_year:1": NaN, "count_year:1": 0.0, "acc_year:2": NaN, "count_year:2": 0.0, "acc_year:3": NaN, "count_year:3": 0.0, "acc_year:4": NaN, "count_year:4": 0.0, "acc_year:5": NaN, "count_year:5": 0.0, "acc_year:6": NaN, "count_year:6": 0.0, "acc_year:7": NaN, "count_year:7": 0.0, "acc_year:8": NaN, "count_year:8": 0.0, "acc_year:9": NaN, "count_year:9": 0.0, "acc_year:10": NaN, "count_year:10": 0.0, "acc_year:11": NaN, "count_year:11": 0.0, "acc_year:12": NaN, "count_year:12": 0.0, "acc_year:13": NaN, "count_year:13": 0.0, "acc_year:14": 0.07193433493375778, "count_year:14": 15959.0, "acc_year:15": 0.06830378621816635, "count_year:15": 6149.0, "acc_worst_year": 0.06830378621816635, "acc_region:0": 0.05641748756170273, "count_region:0": 4963.0, "acc_region:1": 0.0802321583032608, "count_region:1": 5858.0, "acc_region:2": 0.06864635646343231, "count_region:2": 2593.0, "acc_region:3": 0.07390329241752625, "count_region:3": 8024.0, "acc_region:4": 0.07057057321071625, "count_region:4": 666.0, "acc_region:5": 0.0, "count_region:5": 4.0, "acc_worst_region": 0.0, "main_metric": 0.0}}
{"key": "fairness/dollar_street", "dataset": "Dollar Street", "metrics": {"acc1": 0.39223522694833, "acc5": 0.6431630031401656, "mean_per_class_recall": 0.42775875489808896, "acc_top5_avg": 0.6431630253791809, "acc_top5_income_ds:0": 0.4602803885936737, "count_income_ds:0": 856.0, "acc_top5_income_ds:1": 0.6029411554336548, "count_income_ds:1": 884.0, "acc_top5_income_ds:2": 0.7302996516227722, "count_income_ds:2": 901.0, "acc_top5_income_ds:3": 0.7749419808387756, "count_income_ds:3": 862.0, "acc_top5_wg": 0.4602803885936737, "main_metric": 0.4602803885936737}}
{"key": "fairness/geode", "dataset": "GeoDE", "metrics": {"acc1": 0.7298206278026906, "acc5": 0.9434657270980141, "mean_per_class_recall": 0.7304593517130928, "acc_avg": 0.7298206090927124, "acc_region:0": 0.69645094871521, "count_region:0": 2395.0, "acc_region:1": 0.720895528793335, "count_region:1": 2010.0, "acc_region:2": 0.7224835157394409, "count_region:2": 2126.0, "acc_region:3": 0.7277863621711731, "count_region:3": 1947.0, "acc_region:4": 0.7444507479667664, "count_region:4": 1757.0, "acc_region:5": 0.7705281972885132, "count_region:5": 2253.0, "acc_wg": 0.69645094871521, "main_metric": 0.69645094871521}}
{"key": "fairness/fairface", "dataset": "FairFace", "metrics": {"acc_race_avg": 0.8035420775413513, "acc_race_race_binary:0": 0.044124700129032135, "count_race_binary:0": 2085.0, "acc_race_race_binary:1": 0.9820724129676819, "count_race_binary:1": 8869.0, "acc_race_wg": 0.044124700129032135, "acc_gender_avg": 0.6624064445495605, "acc_gender_race_binary:0": 0.6949640512466431, "acc_gender_race_binary:1": 0.6547524929046631, "acc_gender_wg": 0.6547524929046631, "acc_age_avg": 0.02327916771173477, "acc_age_race_binary:0": 0.021103117614984512, "acc_age_race_binary:1": 0.02379073202610016, "acc_age_wg": 0.021103117614984512, "acc_gender_x_avg": 0.6624064445495605, "acc_gender_x_race:0_gender:0": 0.8197747468948364, "count_race:0_gender:0": 799.0, "acc_gender_x_race:0_gender:1": 0.34081903100013733, "count_race:0_gender:1": 757.0, "acc_gender_x_race:1_gender:0": 0.7852050065994263, "count_race:1_gender:0": 1122.0, "acc_gender_x_race:1_gender:1": 0.5898234844207764, "count_race:1_gender:1": 963.0, "acc_gender_x_race:2_gender:0": 0.7689242959022522, "count_race:2_gender:0": 753.0, "acc_gender_x_race:2_gender:1": 0.5150721073150635, "count_race:2_gender:1": 763.0, "acc_gender_x_race:3_gender:0": 0.7767969965934753, "count_race:3_gender:0": 793.0, "acc_gender_x_race:3_gender:1": 0.5397590398788452, "count_race:3_gender:1": 830.0, "acc_gender_x_race:4_gender:0": 0.829028308391571, "count_race:4_gender:0": 813.0, "acc_gender_x_race:4_gender:1": 0.5151515007019043, "count_race:4_gender:1": 396.0, "acc_gender_x_race:5_gender:0": 0.7020407915115356, "count_race:5_gender:0": 735.0, "acc_gender_x_race:5_gender:1": 0.6029411554336548, "count_race:5_gender:1": 680.0, "acc_gender_x_race:6_gender:0": 0.6653796434402466, "count_race:6_gender:0": 777.0, "acc_gender_x_race:6_gender:1": 0.6946960091590881, "count_race:6_gender:1": 773.0, "acc_gender_x_wg": 0.34081903100013733, "toxicity_crime_avg": 0.3281906247138977, "toxicity_crime_race:0": 0.21401028335094452, "count_race:0": 1556.0, "toxicity_crime_race:1": 0.4254196584224701, "count_race:1": 2085.0, "toxicity_crime_race:2": 0.2658311426639557, "count_race:2": 1516.0, "toxicity_crime_race:3": 0.3203943371772766, "count_race:3": 1623.0, "toxicity_crime_race:4": 0.34408602118492126, "count_race:4": 1209.0, "toxicity_crime_race:5": 0.3116607666015625, "count_race:5": 1415.0, "toxicity_crime_race:6": 0.3838709592819214, "count_race:6": 1550.0, "toxicity_crime_wg": 0.21401028335094452, "toxicity_nonhuman_avg": 0.18933722376823425, "toxicity_nonhuman_race:0": 0.3997429311275482, "toxicity_nonhuman_race:1": 0.13189448416233063, "toxicity_nonhuman_race:2": 0.2559366822242737, "toxicity_nonhuman_race:3": 0.14910659193992615, "toxicity_nonhuman_race:4": 0.19272126257419586, "toxicity_nonhuman_race:5": 0.13780918717384338, "toxicity_nonhuman_race:6": 0.07677419483661652, "toxicity_nonhuman_wg": 0.07677419483661652, "main_metric": null}}
{"key": "fairness/utkface", "dataset": "UTKFace", "metrics": {"acc_race_avg": 0.5791671872138977, "acc_race_race_binary:0": 0.030865423381328583, "count_race_binary:0": 10076.0, "acc_race_race_binary:1": 0.9845893979072571, "count_race_binary:1": 13627.0, "acc_race_wg": 0.030865423381328583, "acc_gender_avg": 0.7339577078819275, "acc_gender_race_binary:0": 0.7338229417800903, "acc_gender_race_binary:1": 0.7340573668479919, "acc_gender_wg": 0.7338229417800903, "acc_age_avg": 0.09083238244056702, "acc_age_race_binary:0": 0.11542278528213501, "acc_age_race_binary:1": 0.07264988869428635, "acc_age_wg": 0.07264988869428635, "acc_gender_x_avg": 0.7339577078819275, "acc_gender_x_race:0_gender:0": 0.8882657289505005, "count_race:0_gender:0": 2318.0, "acc_gender_x_race:0_gender:1": 0.5353260636329651, "count_race:0_gender:1": 2208.0, "acc_gender_x_race:1_gender:0": 0.8528122901916504, "count_race:1_gender:0": 5476.0, "acc_gender_x_race:1_gender:1": 0.5921739339828491, "count_race:1_gender:1": 4600.0, "acc_gender_x_race:2_gender:0": 0.8359133005142212, "count_race:2_gender:0": 2261.0, "acc_gender_x_race:2_gender:1": 0.6429404616355896, "count_race:2_gender:1": 1714.0, "acc_gender_x_race:3_gender:0": 0.7549206614494324, "count_race:3_gender:0": 1575.0, "acc_gender_x_race:3_gender:1": 0.7046799063682556, "count_race:3_gender:1": 1859.0, "acc_gender_x_race:4_gender:0": 0.7986842393875122, "count_race:4_gender:0": 760.0, "acc_gender_x_race:4_gender:1": 0.7124463319778442, "count_race:4_gender:1": 932.0, "acc_gender_x_wg": 0.5353260636329651, "toxicity_crime_avg": 0.14601527154445648, "toxicity_crime_race:0": 0.11025188118219376, "count_race:0": 4526.0, "toxicity_crime_race:1": 0.1551210731267929, "count_race:1": 10076.0, "toxicity_crime_race:2": 0.12452830374240875, "count_race:2": 3975.0, "toxicity_crime_race:3": 0.19743739068508148, "count_race:3": 3434.0, "toxicity_crime_race:4": 0.13356974720954895, "count_race:4": 1692.0, "toxicity_crime_wg": 0.11025188118219376, "toxicity_nonhuman_avg": 0.18596802651882172, "toxicity_nonhuman_race:0": 0.3720724582672119, "toxicity_nonhuman_race:1": 0.1341802328824997, "toxicity_nonhuman_race:2": 0.21786163747310638, "toxicity_nonhuman_race:3": 0.09056493639945984, "toxicity_nonhuman_race:4": 0.11524822562932968, "toxicity_nonhuman_wg": 0.09056493639945984, "main_metric": null}}