type | id | num_branches | branches | main_branch_size |
---|---|---|---|---|
model | AdapterHub/bert-base-uncased-pf-ud_en_ewt | 1 | ["main"] | 95,849,855 |
model | AdapterHub/bert-base-uncased-pf-ud_pos | 1 | ["main"] | 3,656,060 |
model | AdapterHub/bert-base-uncased-pf-wic | 1 | ["main"] | 5,969,615 |
model | AdapterHub/bert-base-uncased-pf-wikihop | 1 | ["main"] | 3,606,297 |
model | AdapterHub/bert-base-uncased-pf-winogrande | 1 | ["main"] | 5,967,037 |
model | AdapterHub/bert-base-uncased-pf-wnut_17 | 1 | ["main"] | 3,640,319 |
model | AdapterHub/bert-base-uncased-pf-yelp_polarity | 1 | ["main"] | 5,969,563 |
model | AdapterHub/bioASQyesno | 1 | ["main"] | 5,969,342 |
model | AdapterHub/narrativeqa | 1 | ["main"] | 158,215,191 |
model | AdapterHub/roberta-base-pf-anli_r3 | 1 | ["main"] | 5,972,486 |
model | AdapterHub/roberta-base-pf-art | 1 | ["main"] | 5,965,996 |
model | AdapterHub/roberta-base-pf-boolq | 1 | ["main"] | 5,969,869 |
model | AdapterHub/roberta-base-pf-cola | 1 | ["main"] | 5,969,478 |
model | AdapterHub/roberta-base-pf-commonsense_qa | 1 | ["main"] | 5,966,653 |
model | AdapterHub/roberta-base-pf-comqa | 1 | ["main"] | 3,606,258 |
model | AdapterHub/roberta-base-pf-conll2000 | 1 | ["main"] | 3,671,460 |
model | AdapterHub/roberta-base-pf-conll2003 | 1 | ["main"] | 3,628,112 |
model | AdapterHub/roberta-base-pf-conll2003_pos | 1 | ["main"] | 3,745,809 |
model | AdapterHub/roberta-base-pf-copa | 1 | ["main"] | 5,966,079 |
model | AdapterHub/roberta-base-pf-cosmos_qa | 1 | ["main"] | 5,966,368 |
model | AdapterHub/roberta-base-pf-cq | 1 | ["main"] | 3,606,114 |
model | AdapterHub/roberta-base-pf-drop | 1 | ["main"] | 3,606,255 |
model | AdapterHub/roberta-base-pf-duorc_p | 1 | ["main"] | 3,606,391 |
model | AdapterHub/roberta-base-pf-duorc_s | 1 | ["main"] | 3,606,391 |
model | AdapterHub/roberta-base-pf-emo | 1 | ["main"] | 8,357,244 |
model | AdapterHub/roberta-base-pf-emotion | 1 | ["main"] | 5,981,742 |
model | AdapterHub/roberta-base-pf-fce_error_detection | 1 | ["main"] | 3,607,020 |
model | AdapterHub/roberta-base-pf-hellaswag | 1 | ["main"] | 5,966,369 |
model | AdapterHub/roberta-base-pf-hotpotqa | 1 | ["main"] | 3,606,471 |
model | AdapterHub/roberta-base-pf-imdb | 1 | ["main"] | 5,969,274 |
model | AdapterHub/roberta-base-pf-mit_movie_trivia | 1 | ["main"] | 3,678,139 |
model | AdapterHub/roberta-base-pf-mnli | 1 | ["main"] | 5,972,584 |
model | AdapterHub/roberta-base-pf-mrpc | 1 | ["main"] | 5,969,459 |
model | AdapterHub/roberta-base-pf-multirc | 1 | ["main"] | 5,969,986 |
model | AdapterHub/roberta-base-pf-newsqa | 1 | ["main"] | 3,606,326 |
model | AdapterHub/roberta-base-pf-pmb_sem_tagging | 1 | ["main"] | 3,844,959 |
model | AdapterHub/roberta-base-pf-qnli | 1 | ["main"] | 5,969,459 |
model | AdapterHub/roberta-base-pf-qqp | 1 | ["main"] | 5,969,334 |
model | AdapterHub/roberta-base-pf-quail | 1 | ["main"] | 5,966,088 |
model | AdapterHub/roberta-base-pf-quartz | 1 | ["main"] | 5,966,131 |
model | AdapterHub/roberta-base-pf-quoref | 1 | ["main"] | 3,606,326 |
model | AdapterHub/roberta-base-pf-race | 1 | ["main"] | 5,966,306 |
model | AdapterHub/roberta-base-pf-record | 1 | ["main"] | 5,969,920 |
model | AdapterHub/roberta-base-pf-rotten_tomatoes | 1 | ["main"] | 5,969,874 |
model | AdapterHub/roberta-base-pf-rte | 1 | ["main"] | 5,969,452 |
model | AdapterHub/roberta-base-pf-scicite | 1 | ["main"] | 5,972,478 |
model | AdapterHub/roberta-base-pf-scitail | 1 | ["main"] | 5,969,416 |
model | AdapterHub/roberta-base-pf-sick | 1 | ["main"] | 5,972,271 |
model | AdapterHub/roberta-base-pf-snli | 1 | ["main"] | 5,972,337 |
model | AdapterHub/roberta-base-pf-social_i_qa | 1 | ["main"] | 5,966,436 |
model | AdapterHub/roberta-base-pf-squad | 1 | ["main"] | 3,613,012 |
model | AdapterHub/roberta-base-pf-squad_v2 | 1 | ["main"] | 3,606,491 |
model | AdapterHub/roberta-base-pf-sst2 | 1 | ["main"] | 5,969,472 |
model | AdapterHub/roberta-base-pf-stsb | 1 | ["main"] | 5,966,336 |
model | AdapterHub/roberta-base-pf-swag | 1 | ["main"] | 5,966,081 |
model | AdapterHub/roberta-base-pf-trec | 1 | ["main"] | 5,981,583 |
model | AdapterHub/roberta-base-pf-ud_deprel | 1 | ["main"] | 3,755,508 |
model | AdapterHub/roberta-base-pf-ud_en_ewt | 1 | ["main"] | 95,850,027 |
model | AdapterHub/roberta-base-pf-ud_pos | 1 | ["main"] | 3,657,032 |
model | AdapterHub/roberta-base-pf-wic | 1 | ["main"] | 5,969,723 |
model | AdapterHub/roberta-base-pf-wikihop | 1 | ["main"] | 3,606,405 |
model | AdapterHub/roberta-base-pf-winogrande | 1 | ["main"] | 5,967,209 |
model | AdapterHub/roberta-base-pf-wnut_17 | 1 | ["main"] | 3,640,427 |
model | AdapterHub/roberta-base-pf-yelp_polarity | 1 | ["main"] | 5,969,671 |
model | Adarsh123/distilbert-base-uncased-finetuned-ner | 1 | ["main"] | 737 |
model | Addixz/Sanyx | 1 | ["main"] | 1,175 |
model | Adharsh2608/DialoGPT-small-harrypotter | 1 | ["main"] | 1,175 |
model | AdharshJolly/HarryPotterBot-Model | 1 | ["main"] | 1,023,377,719 |
model | Adi2K/Priv-Consent | 1 | ["main"] | 438,728,145 |
model | AdiShenoy0807/DialoGPT-medium-joshua | 1 | ["main"] | 737 |
model | Adielcane/Adiel | 1 | ["main"] | 1,175 |
model | Adielcane/Adielcane | 1 | ["main"] | 1,175 |
model | Adil617/wav2vec2-base-timit-demo-colab | 1 | ["main"] | 377,706,022 |
model | Adinda/Adinda | 1 | ["main"] | 1,208 |
model | Adityanawal/testmodel_1 | 1 | ["main"] | 1,175 |
model | Adnan/UrduNewsHeadlines | 1 | ["main"] | 690 |
model | AdrianGzz/DialoGPT-small-harrypotter | 1 | ["main"] | 513,017,985 |
model | Adrianaforididk/Jinx | 1 | ["main"] | 1,175 |
model | Advertisement/FischlUWU | 1 | ["main"] | 1,175 |
model | Aero/Tsubomi-Haruno | 1 | ["main"] | 1,447,198,325 |
model | Aeroxas/Botroxas-small | 1 | ["main"] | 737 |
model | Aeskybunnie/Me | 1 | ["main"] | 1,261,562 |
model | AetherIT/DialoGPT-small-Hal | 1 | ["main"] | 1,211 |
model | AethiQs-Max/AethiQs_GemBERT_bertje_50k | 1 | ["main"] | 437,477,133 |
model | AethiQs-Max/aethiqs-base_bertje-data_rotterdam-epochs_10 | 1 | ["main"] | 437,479,710 |
model | AethiQs-Max/aethiqs-base_bertje-data_rotterdam-epochs_30-epoch_30 | 1 | ["main"] | 437,477,306 |
model | AethiQs-Max/cross_encoder | 1 | ["main"] | 1,175 |
model | AethiQs-Max/s3-v1-20_epochs | 1 | ["main"] | 437,477,306 |
model | Aftabhussain/Tomato_Leaf_Classifier | 1 | ["main"] | 343,313,269 |
model | Ahda/M | 1 | ["main"] | 1,175 |
model | Ahmad/parsT5-base | 1 | ["main"] | 991,312,212 |
model | Ahmad/parsT5 | 1 | ["main"] | 993,379,313 |
model | Ahmadatiya97/Alannah | 1 | ["main"] | 1,175 |
model | Ahmadvakili/A | 1 | ["main"] | 1,175 |
model | Ahmed59/Demo-Team-5-SIAD | 1 | ["main"] | 541,798,324 |
model | AhmedBou/TuniBert | 1 | ["main"] | 442,902,550 |
model | AhmedHassan19/model | 1 | ["main"] | 1,175 |
model | AhmedSSoliman/MarianCG-CoNaLa | 1 | ["main"] | 945,414,536 |
model | Ahmedahmed/Wewe | 1 | ["main"] | 1,175 |
model | Ahren09/distilbert-base-uncased-finetuned-cola | 1 | ["main"] | 268,552,124 |