Amanda committed on
Commit b12c5f2
1 Parent(s): 8f15354

Update results.py

Files changed (1)
  1. results.py +92 -92
results.py CHANGED
@@ -36,98 +36,98 @@ DEV_SPLIT = ["CmedqaRetrieval", "CovidRetrieval", "DuRetrieval", "EcomRetrieval"
 
 
 
- MODELS = ['instructor-base',
- 'xlm-roberta-large',
- 'gtr-t5-large',
- 'sentence-t5-xxl',
- 'GIST-Embedding-v0',
- 'e5-base',
- 'mxbai-embed-2d-large-v1',
- 'SGPT-5.8B-weightedmean-nli-bitfit',
- '.DS_Store',
- 'jina-embeddings-v2-base-de',
- 'gte-base',
- 'jina-embedding-b-en-v1',
- 'LaBSE',
- 'sgpt-bloom-7b1-msmarco',
- 'bi-cse',
- 'distilbert-base-uncased',
- 'bert-base-10lang-cased',
- 'sentence-t5-large',
- 'jina-embeddings-v2-small-en',
- 'e5-mistral-7b-instruct',
- 'bge-base-en-v1.5',
- 'ember-v1',
- 'e5-large-v2',
- 'lodestone-base-4096-v1',
- 'all-mpnet-base-v2',
- 'sentence-t5-xl',
- 'distilbert-base-en-fr-cased',
- 'gte-tiny',
- 'text2vec-base-multilingual',
- 'GIST-all-MiniLM-L6-v2',
- 'jina-embeddings-v2-base-es',
- 'bert-base-multilingual-uncased',
- 'distiluse-base-multilingual-cased-v2',
- 'sup-simcse-bert-base-uncased',
- 'e5-small-v2',
- 'GritLM-7B',
- 'sentence-t5-base',
- 'SFR-Embedding-Mistral',
- 'mxbai-embed-large-v1',
- 'stella-base-en-v2',
- 'udever-bloom-3b',
- 'bert-base-multilingual-cased',
- 'all-MiniLM-L12-v2',
- 'sf_model_e5',
- 'bert-base-portuguese-cased',
- 'bge-small-en-v1.5',
- 'SGPT-125M-weightedmean-msmarco-specb-bitfit',
- 'udever-bloom-560m',
- 'gtr-t5-base',
- 'fin-mpnet-base',
- 'SGPT-2.7B-weightedmean-msmarco-specb-bitfit',
- 'xlm-roberta-base',
- 'GIST-small-Embedding-v0',
- 'gte-large',
- 'ALL_862873',
- 'e5-large',
- 'distilbert-base-en-fr-es-pt-it-cased',
- 'dfm-sentence-encoder-large-v1',
- 'bge-micro',
- 'instructor-large',
- 'average_word_embeddings_glove.6B.300d',
- 'multilingual-e5-large-instruct',
- 'msmarco-bert-co-condensor',
- 'multilingual-e5-small',
- 'UAE-Large-V1',
- 'udever-bloom-1b1',
- 'distilbert-base-fr-cased',
- 'instructor-xl',
- 'bert-base-uncased',
- 'all-MiniLM-L6-v2',
- 'e5-base-v2',
- 'jina-embedding-l-en-v1',
- 'gtr-t5-xl',
- 'gte-small',
- 'bge-small-4096',
- 'average_word_embeddings_komninos',
- 'unsup-simcse-bert-base-uncased',
- 'bert-base-15lang-cased',
- 'paraphrase-multilingual-MiniLM-L12-v2',
- 'distilbert-base-25lang-cased',
- 'contriever-base-msmarco',
- 'multilingual-e5-large',
- 'luotuo-bert-medium',
- 'GIST-large-Embedding-v0',
- 'bge-large-en-v1.5',
- 'cai-lunaris-text-embeddings',
- 'gtr-t5-xxl',
- 'multilingual-e5-base',
- 'paraphrase-multilingual-mpnet-base-v2',
- 'SGPT-1.3B-weightedmean-msmarco-specb-bitfit',
- 'e5-dansk-test-0.1',
- 'allenai-specter'
+ MODELS = [
+ "instructor-base",
+ "xlm-roberta-large",
+ "gtr-t5-large",
+ "sentence-t5-xxl",
+ "GIST-Embedding-v0",
+ "e5-base",
+ "mxbai-embed-2d-large-v1",
+ "SGPT-5.8B-weightedmean-nli-bitfit",
+ "jina-embeddings-v2-base-de",
+ "gte-base",
+ "jina-embedding-b-en-v1",
+ "LaBSE",
+ "sgpt-bloom-7b1-msmarco",
+ "bi-cse",
+ "distilbert-base-uncased",
+ "bert-base-10lang-cased",
+ "sentence-t5-large",
+ "jina-embeddings-v2-small-en",
+ "e5-mistral-7b-instruct",
+ "bge-base-en-v1.5",
+ "ember-v1",
+ "e5-large-v2",
+ "lodestone-base-4096-v1",
+ "all-mpnet-base-v2",
+ "sentence-t5-xl",
+ "distilbert-base-en-fr-cased",
+ "gte-tiny",
+ "text2vec-base-multilingual",
+ "GIST-all-MiniLM-L6-v2",
+ "jina-embeddings-v2-base-es",
+ "bert-base-multilingual-uncased",
+ "distiluse-base-multilingual-cased-v2",
+ "sup-simcse-bert-base-uncased",
+ "e5-small-v2",
+ "GritLM-7B",
+ "sentence-t5-base",
+ "SFR-Embedding-Mistral",
+ "mxbai-embed-large-v1",
+ "stella-base-en-v2",
+ "udever-bloom-3b",
+ "bert-base-multilingual-cased",
+ "all-MiniLM-L12-v2",
+ "sf_model_e5",
+ "bert-base-portuguese-cased",
+ "bge-small-en-v1.5",
+ "SGPT-125M-weightedmean-msmarco-specb-bitfit",
+ "udever-bloom-560m",
+ "gtr-t5-base",
+ "fin-mpnet-base",
+ "SGPT-2.7B-weightedmean-msmarco-specb-bitfit",
+ "xlm-roberta-base",
+ "GIST-small-Embedding-v0",
+ "gte-large",
+ "ALL_862873",
+ "e5-large",
+ "distilbert-base-en-fr-es-pt-it-cased",
+ "dfm-sentence-encoder-large-v1",
+ "bge-micro",
+ "instructor-large",
+ "average_word_embeddings_glove.6B.300d",
+ "multilingual-e5-large-instruct",
+ "msmarco-bert-co-condensor",
+ "multilingual-e5-small",
+ "UAE-Large-V1",
+ "udever-bloom-1b1",
+ "distilbert-base-fr-cased",
+ "instructor-xl",
+ "bert-base-uncased",
+ "all-MiniLM-L6-v2",
+ "e5-base-v2",
+ "jina-embedding-l-en-v1",
+ "gtr-t5-xl",
+ "gte-small",
+ "bge-small-4096",
+ "average_word_embeddings_komninos",
+ "unsup-simcse-bert-base-uncased",
+ "bert-base-15lang-cased",
+ "paraphrase-multilingual-MiniLM-L12-v2",
+ "distilbert-base-25lang-cased",
+ "contriever-base-msmarco",
+ "multilingual-e5-large",
+ "luotuo-bert-medium",
+ "GIST-large-Embedding-v0",
+ "bge-large-en-v1.5",
+ "cai-lunaris-text-embeddings",
+ "gtr-t5-xxl",
+ "multilingual-e5-base",
+ "paraphrase-multilingual-mpnet-base-v2",
+ "SGPT-1.3B-weightedmean-msmarco-specb-bitfit",
+ "e5-dansk-test-0.1",
+ "allenai-specter"
  ]
  from pathlib import Path
 
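The updated MODELS list is a plain list of model-folder names, and the file already imports pathlib. The rest of results.py is not part of this diff, so the snippet below is only a minimal sketch of how such a list is commonly paired with Path to collect per-model result files; the results/<model>/ layout and the collect_result_files helper are assumptions for illustration, not code from the repository.

from pathlib import Path

# Hypothetical usage sketch (not from results.py): map each model name in
# MODELS to the result files found under a results/<model>/ directory.
MODELS = ["instructor-base", "xlm-roberta-large"]  # truncated here; full list in the diff above

def collect_result_files(results_dir="results"):
    """Return {model_name: sorted JSON result files under results_dir/<model>/}."""
    files_per_model = {}
    for model in MODELS:
        model_dir = Path(results_dir) / model
        # Models without a results folder (e.g. not yet evaluated) get an empty list.
        files_per_model[model] = sorted(model_dir.glob("*.json")) if model_dir.is_dir() else []
    return files_per_model

if __name__ == "__main__":
    for model, files in collect_result_files().items():
        print(f"{model}: {len(files)} result file(s)")

Under this reading, '.DS_Store' (a macOS Finder metadata file, not a model folder) would never yield any result files, which is presumably why the commit drops it from the list.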