tonychenxyz committed on
Commit
9e34a62
1 Parent(s): 020aa91
This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. Dockerfile +33 -0
  2. LICENSE +201 -0
  3. all_emo_dirs.pkl +3 -0
  4. app.py +364 -0
  5. assets/favicon.ico +0 -0
  6. assets/logo.png +0 -0
  7. cache/.locks/models--Salesforce--SFR-Embedding-Mistral/42dcdfcaf9e42a488d4be06500dd771d7aa11e83.lock +0 -0
  8. cache/.locks/models--Salesforce--SFR-Embedding-Mistral/afbfcebcf9df8c0af538cd5b6f616bd1d7a9739eba4b81d871545b1b562d6b0a.lock +0 -0
  9. cache/.locks/models--Salesforce--SFR-Embedding-Mistral/c19160bba3c1267f959caf6d13fb07f9ea232e04.lock +0 -0
  10. cache/.locks/models--Salesforce--SFR-Embedding-Mistral/ef62bf21fb2396937098b86ae80c68813b229c18.lock +0 -0
  11. cache/.locks/models--Salesforce--SFR-Embedding-Mistral/f7640f94e81bb7f4f04daf1668850b38763a13d9.lock +0 -0
  12. cache/.locks/models--Salesforce--SFR-Embedding-Mistral/f8194e4e9432d287bf257d4a7d4a0f2446c32da8.lock +0 -0
  13. cache/.locks/models--Salesforce--SFR-Embedding-Mistral/feb95adc7e79e878999ba5a1d3ddfe9f16eff0f1.lock +0 -0
  14. cache/models--Salesforce--SFR-Embedding-Mistral/.no_exist/938c560d1c236aa563b2dbdf084f28ab28bccb11/model.safetensors +0 -0
  15. cache/models--Salesforce--SFR-Embedding-Mistral/blobs/42dcdfcaf9e42a488d4be06500dd771d7aa11e83 +4 -0
  16. cache/models--Salesforce--SFR-Embedding-Mistral/blobs/c19160bba3c1267f959caf6d13fb07f9ea232e04 +27 -0
  17. cache/models--Salesforce--SFR-Embedding-Mistral/blobs/ef62bf21fb2396937098b86ae80c68813b229c18 +7 -0
  18. cache/models--Salesforce--SFR-Embedding-Mistral/blobs/f7640f94e81bb7f4f04daf1668850b38763a13d9 +14 -0
  19. cache/models--Salesforce--SFR-Embedding-Mistral/blobs/f8194e4e9432d287bf257d4a7d4a0f2446c32da8 +297 -0
  20. cache/models--Salesforce--SFR-Embedding-Mistral/blobs/feb95adc7e79e878999ba5a1d3ddfe9f16eff0f1 +3398 -0
  21. cache/models--Salesforce--SFR-Embedding-Mistral/refs/main +1 -0
  22. cache/models--Salesforce--SFR-Embedding-Mistral/snapshots/938c560d1c236aa563b2dbdf084f28ab28bccb11/README.md +1 -0
  23. cache/models--Salesforce--SFR-Embedding-Mistral/snapshots/938c560d1c236aa563b2dbdf084f28ab28bccb11/config.json +1 -0
  24. cache/models--Salesforce--SFR-Embedding-Mistral/snapshots/938c560d1c236aa563b2dbdf084f28ab28bccb11/config_sentence_transformers.json +1 -0
  25. cache/models--Salesforce--SFR-Embedding-Mistral/snapshots/938c560d1c236aa563b2dbdf084f28ab28bccb11/model.safetensors.index.json +1 -0
  26. cache/models--Salesforce--SFR-Embedding-Mistral/snapshots/938c560d1c236aa563b2dbdf084f28ab28bccb11/modules.json +1 -0
  27. cache/models--Salesforce--SFR-Embedding-Mistral/snapshots/938c560d1c236aa563b2dbdf084f28ab28bccb11/sentence_bert_config.json +1 -0
  28. docker-compose.yml +61 -0
  29. emo-knob-teaser-1.svg +0 -0
  30. fam/__init__.py +0 -0
  31. fam/__pycache__/__init__.cpython-310.pyc +0 -0
  32. fam/__pycache__/__init__.cpython-39.pyc +0 -0
  33. fam/llm/__init__.py +0 -0
  34. fam/llm/__pycache__/__init__.cpython-310.pyc +0 -0
  35. fam/llm/__pycache__/__init__.cpython-39.pyc +0 -0
  36. fam/llm/__pycache__/decoders.cpython-310.pyc +0 -0
  37. fam/llm/__pycache__/decoders.cpython-39.pyc +0 -0
  38. fam/llm/__pycache__/enhancers.cpython-310.pyc +0 -0
  39. fam/llm/__pycache__/enhancers.cpython-39.pyc +0 -0
  40. fam/llm/__pycache__/fast_inference.cpython-310.pyc +0 -0
  41. fam/llm/__pycache__/fast_inference.cpython-39.pyc +0 -0
  42. fam/llm/__pycache__/fast_inference_utils.cpython-310.pyc +0 -0
  43. fam/llm/__pycache__/fast_inference_utils.cpython-39.pyc +0 -0
  44. fam/llm/__pycache__/fast_model.cpython-310.pyc +0 -0
  45. fam/llm/__pycache__/fast_model.cpython-39.pyc +0 -0
  46. fam/llm/__pycache__/inference.cpython-310.pyc +0 -0
  47. fam/llm/__pycache__/inference.cpython-39.pyc +0 -0
  48. fam/llm/__pycache__/model.cpython-310.pyc +0 -0
  49. fam/llm/__pycache__/model.cpython-39.pyc +0 -0
  50. fam/llm/__pycache__/utils.cpython-310.pyc +0 -0
Dockerfile ADDED
@@ -0,0 +1,33 @@
+ FROM nvidia/cuda:12.1.0-devel-ubuntu22.04 as base
+
+ # Install system dependencies in a single RUN command to reduce layers
+ # Combine apt-get update, upgrade, and installation of packages. Clean up in the same layer to reduce image size.
+ RUN apt-get update && \
+     apt-get upgrade -y && \
+     apt-get install -y python3.10 python3-pip git wget curl build-essential && \
+     apt-get autoremove -y && \
+     apt-get clean && \
+     rm -rf /var/lib/apt/lists/*
+
+ # install ffmpeg
+ RUN wget https://johnvansickle.com/ffmpeg/builds/ffmpeg-git-amd64-static.tar.xz &&\
+     wget https://johnvansickle.com/ffmpeg/builds/ffmpeg-git-amd64-static.tar.xz.md5 &&\
+     md5sum -c ffmpeg-git-amd64-static.tar.xz.md5 &&\
+     tar xvf ffmpeg-git-amd64-static.tar.xz &&\
+     mv ffmpeg-git-*-static/ffprobe ffmpeg-git-*-static/ffmpeg /usr/local/bin/ &&\
+     rm -rf ffmpeg-git-*
+
+ WORKDIR /app
+
+ COPY requirements.txt requirements.txt
+
+ RUN pip install --no-cache-dir packaging wheel torch
+ RUN pip install --no-cache-dir audiocraft # HACK: installation fails within the requirements.txt
+ RUN pip install --no-cache-dir -r requirements.txt
+ RUN pip install --no-cache-dir --upgrade torch torchaudio
+
+ COPY . .
+
+ RUN pip install --no-cache-dir -e .
+
+ ENTRYPOINT ["python3.10", "serving.py"]
LICENSE ADDED
@@ -0,0 +1,201 @@
+                                  Apache License
+                            Version 2.0, January 2004
+                         http://www.apache.org/licenses/
+
+    TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+    1. Definitions.
+
+       "License" shall mean the terms and conditions for use, reproduction,
+       and distribution as defined by Sections 1 through 9 of this document.
+
+       "Licensor" shall mean the copyright owner or entity authorized by
+       the copyright owner that is granting the License.
+
+       "Legal Entity" shall mean the union of the acting entity and all
+       other entities that control, are controlled by, or are under common
+       control with that entity. For the purposes of this definition,
+       "control" means (i) the power, direct or indirect, to cause the
+       direction or management of such entity, whether by contract or
+       otherwise, or (ii) ownership of fifty percent (50%) or more of the
+       outstanding shares, or (iii) beneficial ownership of such entity.
+
+       "You" (or "Your") shall mean an individual or Legal Entity
+       exercising permissions granted by this License.
+
+       "Source" form shall mean the preferred form for making modifications,
+       including but not limited to software source code, documentation
+       source, and configuration files.
+
+       "Object" form shall mean any form resulting from mechanical
+       transformation or translation of a Source form, including but
+       not limited to compiled object code, generated documentation,
+       and conversions to other media types.
+
+       "Work" shall mean the work of authorship, whether in Source or
+       Object form, made available under the License, as indicated by a
+       copyright notice that is included in or attached to the work
+       (an example is provided in the Appendix below).
+
+       "Derivative Works" shall mean any work, whether in Source or Object
+       form, that is based on (or derived from) the Work and for which the
+       editorial revisions, annotations, elaborations, or other modifications
+       represent, as a whole, an original work of authorship. For the purposes
+       of this License, Derivative Works shall not include works that remain
+       separable from, or merely link (or bind by name) to the interfaces of,
+       the Work and Derivative Works thereof.
+
+       "Contribution" shall mean any work of authorship, including
+       the original version of the Work and any modifications or additions
+       to that Work or Derivative Works thereof, that is intentionally
+       submitted to Licensor for inclusion in the Work by the copyright owner
+       or by an individual or Legal Entity authorized to submit on behalf of
+       the copyright owner. For the purposes of this definition, "submitted"
+       means any form of electronic, verbal, or written communication sent
+       to the Licensor or its representatives, including but not limited to
+       communication on electronic mailing lists, source code control systems,
+       and issue tracking systems that are managed by, or on behalf of, the
+       Licensor for the purpose of discussing and improving the Work, but
+       excluding communication that is conspicuously marked or otherwise
+       designated in writing by the copyright owner as "Not a Contribution."
+
+       "Contributor" shall mean Licensor and any individual or Legal Entity
+       on behalf of whom a Contribution has been received by Licensor and
+       subsequently incorporated within the Work.
+
+    2. Grant of Copyright License. Subject to the terms and conditions of
+       this License, each Contributor hereby grants to You a perpetual,
+       worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+       copyright license to reproduce, prepare Derivative Works of,
+       publicly display, publicly perform, sublicense, and distribute the
+       Work and such Derivative Works in Source or Object form.
+
+    3. Grant of Patent License. Subject to the terms and conditions of
+       this License, each Contributor hereby grants to You a perpetual,
+       worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+       (except as stated in this section) patent license to make, have made,
+       use, offer to sell, sell, import, and otherwise transfer the Work,
+       where such license applies only to those patent claims licensable
+       by such Contributor that are necessarily infringed by their
+       Contribution(s) alone or by combination of their Contribution(s)
+       with the Work to which such Contribution(s) was submitted. If You
+       institute patent litigation against any entity (including a
+       cross-claim or counterclaim in a lawsuit) alleging that the Work
+       or a Contribution incorporated within the Work constitutes direct
+       or contributory patent infringement, then any patent licenses
+       granted to You under this License for that Work shall terminate
+       as of the date such litigation is filed.
+
+    4. Redistribution. You may reproduce and distribute copies of the
+       Work or Derivative Works thereof in any medium, with or without
+       modifications, and in Source or Object form, provided that You
+       meet the following conditions:
+
+       (a) You must give any other recipients of the Work or
+           Derivative Works a copy of this License; and
+
+       (b) You must cause any modified files to carry prominent notices
+           stating that You changed the files; and
+
+       (c) You must retain, in the Source form of any Derivative Works
+           that You distribute, all copyright, patent, trademark, and
+           attribution notices from the Source form of the Work,
+           excluding those notices that do not pertain to any part of
+           the Derivative Works; and
+
+       (d) If the Work includes a "NOTICE" text file as part of its
+           distribution, then any Derivative Works that You distribute must
+           include a readable copy of the attribution notices contained
+           within such NOTICE file, excluding those notices that do not
+           pertain to any part of the Derivative Works, in at least one
+           of the following places: within a NOTICE text file distributed
+           as part of the Derivative Works; within the Source form or
+           documentation, if provided along with the Derivative Works; or,
+           within a display generated by the Derivative Works, if and
+           wherever such third-party notices normally appear. The contents
+           of the NOTICE file are for informational purposes only and
+           do not modify the License. You may add Your own attribution
+           notices within Derivative Works that You distribute, alongside
+           or as an addendum to the NOTICE text from the Work, provided
+           that such additional attribution notices cannot be construed
+           as modifying the License.
+
+       You may add Your own copyright statement to Your modifications and
+       may provide additional or different license terms and conditions
+       for use, reproduction, or distribution of Your modifications, or
+       for any such Derivative Works as a whole, provided Your use,
+       reproduction, and distribution of the Work otherwise complies with
+       the conditions stated in this License.
+
+    5. Submission of Contributions. Unless You explicitly state otherwise,
+       any Contribution intentionally submitted for inclusion in the Work
+       by You to the Licensor shall be under the terms and conditions of
+       this License, without any additional terms or conditions.
+       Notwithstanding the above, nothing herein shall supersede or modify
+       the terms of any separate license agreement you may have executed
+       with Licensor regarding such Contributions.
+
+    6. Trademarks. This License does not grant permission to use the trade
+       names, trademarks, service marks, or product names of the Licensor,
+       except as required for reasonable and customary use in describing the
+       origin of the Work and reproducing the content of the NOTICE file.
+
+    7. Disclaimer of Warranty. Unless required by applicable law or
+       agreed to in writing, Licensor provides the Work (and each
+       Contributor provides its Contributions) on an "AS IS" BASIS,
+       WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+       implied, including, without limitation, any warranties or conditions
+       of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+       PARTICULAR PURPOSE. You are solely responsible for determining the
+       appropriateness of using or redistributing the Work and assume any
+       risks associated with Your exercise of permissions under this License.
+
+    8. Limitation of Liability. In no event and under no legal theory,
+       whether in tort (including negligence), contract, or otherwise,
+       unless required by applicable law (such as deliberate and grossly
+       negligent acts) or agreed to in writing, shall any Contributor be
+       liable to You for damages, including any direct, indirect, special,
+       incidental, or consequential damages of any character arising as a
+       result of this License or out of the use or inability to use the
+       Work (including but not limited to damages for loss of goodwill,
+       work stoppage, computer failure or malfunction, or any and all
+       other commercial damages or losses), even if such Contributor
+       has been advised of the possibility of such damages.
+
+    9. Accepting Warranty or Additional Liability. While redistributing
+       the Work or Derivative Works thereof, You may choose to offer,
+       and charge a fee for, acceptance of support, warranty, indemnity,
+       or other liability obligations and/or rights consistent with this
+       License. However, in accepting such obligations, You may act only
+       on Your own behalf and on Your sole responsibility, not on behalf
+       of any other Contributor, and only if You agree to indemnify,
+       defend, and hold each Contributor harmless for any liability
+       incurred by, or claims asserted against, such Contributor by reason
+       of your accepting any such warranty or additional liability.
+
+    END OF TERMS AND CONDITIONS
+
+    APPENDIX: How to apply the Apache License to your work.
+
+       To apply the Apache License to your work, attach the following
+       boilerplate notice, with the fields enclosed by brackets "[]"
+       replaced with your own identifying information. (Don't include
+       the brackets!) The text should be enclosed in the appropriate
+       comment syntax for the file format. We also recommend that a
+       file or class name and description of purpose be included on the
+       same "printed page" as the copyright notice for easier
+       identification within third-party archives.
+
+    Copyright [yyyy] [name of copyright owner]
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
all_emo_dirs.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:beadd1f3c7eada0fa99dbdecc5c370036c1c044955a02f019f879bdc6f5fefcb
+ size 20343
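The three lines above are a Git LFS pointer, not the pickle itself. Based on how app.py consumes the file, the deserialized object is a dict mapping preset emotion names to speaker-embedding direction vectors. A minimal sketch of inspecting it, assuming the LFS object has been fetched (e.g. via git lfs pull):

    import pickle

    # Load the emotion-direction dictionary that app.py reads into ALL_EMO_DIRS.
    with open("all_emo_dirs.pkl", "rb") as f:
        all_emo_dirs = pickle.load(f)

    # Keys are the preset emotion names shown in the demo's "Emotion" radio group.
    print(sorted(all_emo_dirs.keys()))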
app.py ADDED
@@ -0,0 +1,364 @@
+ import gradio as gr
+ import os
+
+
+ is_prod = True
+ if os.environ.get('PROD_MODE') == 'local':
+     is_prod = False
+
+ import pickle
+
+ if not is_prod:
+     import os
+     os.environ['HF_HOME'] = '/proj/afosr/metavoice/cache'
+     os.environ['TRANSFORMERS_CACHE'] = '/proj/afosr/metavoice/cache'
+     os.environ['HF_DATASETS_CACHE'] = '/proj/afosr/metavoice/cache'
+     os.environ['HF_METRICS_CACHE'] = '/proj/afosr/metavoice/cache'
+     os.environ['HF_MODULES_CACHE'] = '/proj/afosr/metavoice/cache'
+     ffmpeg_path = '/home/hc3295/ffmpeg_build/bin'
+     os.environ['PATH'] += os.pathsep + ffmpeg_path
+
+
+ import shutil
+ import tempfile
+ import time
+ from pathlib import Path
+
+ import librosa
+ import torch
+ from huggingface_hub import snapshot_download
+
+ from fam.llm.adapters import FlattenedInterleavedEncodec2Codebook
+ from fam.llm.decoders import EncodecDecoder
+ from fam.llm.fast_inference_utils import build_model, main
+ from fam.llm.inference import (
+     EncodecDecoder,
+     InferenceConfig,
+     Model,
+     TiltedEncodec,
+     TrainedBPETokeniser,
+     get_cached_embedding,
+     get_cached_file,
+     get_enhancer,
+ )
+ from fam.llm.utils import (
+     check_audio_file,
+     get_default_dtype,
+     get_device,
+     normalize_text,
+ )
+
+ debug = False
+ if not debug:
+     model_name = "metavoiceio/metavoice-1B-v0.1"
+     seed = 1337
+     output_dir = "outputs"
+     _dtype = get_default_dtype()
+     _device = 'cuda:0'
+     _model_dir = snapshot_download(repo_id=model_name)
+     first_stage_adapter = FlattenedInterleavedEncodec2Codebook(end_of_audio_token=1024)
+     os.makedirs(output_dir, exist_ok=True)
+
+     second_stage_ckpt_path = f"{_model_dir}/second_stage.pt"
+     config_second_stage = InferenceConfig(
+         ckpt_path=second_stage_ckpt_path,
+         num_samples=1,
+         seed=seed,
+         device=_device,
+         dtype=_dtype,
+         compile=False,
+         init_from="resume",
+         output_dir=output_dir,
+     )
+     data_adapter_second_stage = TiltedEncodec(end_of_audio_token=1024)
+     llm_second_stage = Model(
+         config_second_stage, TrainedBPETokeniser, EncodecDecoder, data_adapter_fn=data_adapter_second_stage.decode
+     )
+     enhancer = get_enhancer("df")
+
+     precision = {"float16": torch.float16, "bfloat16": torch.bfloat16}[_dtype]
+     model, tokenizer, smodel, model_size = build_model(
+         precision=precision,
+         checkpoint_path=Path(f"{_model_dir}/first_stage.pt"),
+         spk_emb_ckpt_path=Path(f"{_model_dir}/speaker_encoder.pt"),
+         device=_device,
+         compile=True,
+         compile_prefill=True,
+     )
+
+
+ def generate_sample(text, emo_dir=None, source_path=None, emo_path=None, neutral_path=None, strength=0.1, top_p=0.95, guidance_scale=3.0, preset_dropdown=None, toggle=None):
+     print('text', text)
+     print('emo_dir', emo_dir)
+     print('source_path', source_path)
+     print('emo_path', emo_path)
+     print('neutral_path', neutral_path)
+     print('strength', strength)
+     print('top_p', top_p)
+     print('guidance_scale', guidance_scale)
+
+     if toggle == RADIO_CHOICES[0]:
+         source_path = PRESET_VOICES[preset_dropdown]
+     source_path = get_cached_file(source_path)
+     check_audio_file(source_path)
+     source_emb = get_cached_embedding(source_path, smodel).to(device=_device, dtype=precision)
+
+     if emo_dir == EMO_NAMES[0]:
+         # Compute the emotion direction from an uploaded emotional/neutral pair.
+         emo_path = get_cached_file(emo_path)
+         check_audio_file(emo_path)
+         emo_emb = get_cached_embedding(emo_path, smodel).to(device=_device, dtype=precision)
+
+         neutral_path = get_cached_file(neutral_path)
+         check_audio_file(neutral_path)
+         neutral_emb = get_cached_embedding(neutral_path, smodel).to(device=_device, dtype=precision)
+
+         emo_dir = emo_emb - neutral_emb
+         emo_dir = emo_dir / torch.norm(emo_dir, p=2)
+     else:
+         # Use a precomputed emotion direction from all_emo_dirs.pkl.
+         emo_dir = torch.tensor(ALL_EMO_DIRS[emo_dir], device=_device, dtype=precision)
+
+     edited_emb = source_emb + strength * emo_dir
+     edited_emb = edited_emb.to(device=_device, dtype=precision)
+
+     temperature = 1.0
+     text = normalize_text(text)
+
+     start = time.time()
+     # first stage LLM
+     tokens = main(
+         model=model,
+         tokenizer=tokenizer,
+         model_size=model_size,
+         prompt=text,
+         spk_emb=edited_emb,
+         top_p=torch.tensor(top_p, device=_device, dtype=precision),
+         guidance_scale=torch.tensor(guidance_scale, device=_device, dtype=precision),
+         temperature=torch.tensor(temperature, device=_device, dtype=precision),
+     )
+     text_ids, extracted_audio_ids = first_stage_adapter.decode([tokens])
+
+     b_speaker_embs = edited_emb.unsqueeze(0)
+
+     # second stage LLM + multi-band diffusion model
+     wav_files = llm_second_stage(
+         texts=[text],
+         encodec_tokens=[torch.tensor(extracted_audio_ids, dtype=torch.int32, device=_device).unsqueeze(0)],
+         speaker_embs=b_speaker_embs,
+         batch_size=1,
+         guidance_scale=None,
+         top_p=None,
+         top_k=200,
+         temperature=1.0,
+         max_new_tokens=None,
+     )
+
+     wav_file = wav_files[0]
+     with tempfile.NamedTemporaryFile(suffix=".wav") as enhanced_tmp:
+         enhancer(str(wav_file) + ".wav", enhanced_tmp.name)
+         shutil.copy2(enhanced_tmp.name, str(wav_file) + ".wav")
+         print(f"\nSaved audio to {wav_file}.wav")
+
+     output_path = str(wav_file) + ".wav"
+     return output_path
+
+
+ ALL_EMO_DIRS = pickle.load(open('all_emo_dirs.pkl', 'rb'))
+ EMO_NAMES = ['Upload your own sample'] + list(ALL_EMO_DIRS.keys())
+
+ RADIO_CHOICES = ["Preset voices", "Upload your voice"]
+ MAX_CHARS = 220
+ PRESET_VOICES = {
+     # female
+     "Bria": "https://cdn.themetavoice.xyz/speakers%2Fbria.mp3",
+     # male
+     "Alex": "https://cdn.themetavoice.xyz/speakers/alex.mp3",
+     "Jacob": "https://cdn.themetavoice.xyz/speakers/jacob.wav",
+ }
+
+
+ def denormalise_top_p(top_p):
+     # returns top_p in the range [0.9, 1.0]
+     return round(0.9 + top_p / 100, 2)
+
+
+ def denormalise_guidance(guidance):
+     # returns guidance in the range [1.0, 3.0]
+     return 1 + ((guidance - 1) * (3 - 1)) / (5 - 1)
+
+
+ def _check_file_size(path):
+     if not path:
+         return
+     filesize = os.path.getsize(path)
+     filesize_mb = filesize / 1024 / 1024
+     if filesize_mb >= 50:
+         raise gr.Error(f"Please upload a sample smaller than 50 MB for voice cloning. Provided: {round(filesize_mb)} MB")
+
+
+ def _handle_edge_cases(to_say, upload_target):
+     if not to_say:
+         raise gr.Error("Please provide text to synthesise")
+
+     if len(to_say) > MAX_CHARS:
+         gr.Warning(
+             f"Max {MAX_CHARS} characters allowed. Provided: {len(to_say)} characters. Truncating and generating speech... The end of the result may be unstable."
+         )
+
+     if not upload_target:
+         return
+
+     check_audio_file(upload_target)  # checks that the file duration is at least 30s
+     _check_file_size(upload_target)
+
+
+ # Retained from the original MetaVoice demo; TTS_MODEL is not defined in this app,
+ # and the UI below calls generate_sample instead.
+ def tts(to_say, top_p, guidance, toggle, preset_dropdown, upload_target):
+     try:
+         d_top_p = denormalise_top_p(top_p)
+         d_guidance = denormalise_guidance(guidance)
+
+         _handle_edge_cases(to_say, upload_target)
+
+         to_say = to_say if len(to_say) < MAX_CHARS else to_say[:MAX_CHARS]
+
+         return TTS_MODEL.synthesise(
+             text=to_say,
+             spk_ref_path=PRESET_VOICES[preset_dropdown] if toggle == RADIO_CHOICES[0] else upload_target,
+             top_p=d_top_p,
+             guidance_scale=d_guidance,
+         )
+     except Exception as e:
+         raise gr.Error(f"Something went wrong. Reason: {str(e)}")
+
+
+ def change_voice_selection_layout(choice):
+     if choice == RADIO_CHOICES[0]:
+         return [gr.update(visible=True), gr.update(visible=False)]
+
+     return [gr.update(visible=False), gr.update(visible=True)]
+
+
+ def change_emotion_selection_layout(choice):
+     if choice == EMO_NAMES[0]:
+         return [gr.update(visible=True)]
+
+     return [gr.update(visible=False)]
+
+
+ title = """
+ <h1 style="margin-top: 10px;" class="page-title">Demo for <span style="margin-left: 10px;background-color: #E0FEE4;padding: 15px;border-radius: 10px;">🎛️ EmoKnob</span></h1>
+ """
+
+ description = """
+ - While existing TTS services do not allow fine-grained control over emotions, EmoKnob lets users control emotion in speech with few-shot samples.
+ - In this demo, you can select from a few preset voices or upload your own voice sample to clone.
+ - You can then use a preset emotion, or upload your own emotional/neutral sample pair, to control the emotion.
+ - You can adjust the strength of the emotion with the slider.
+
+
+ EmoKnob uses [MetaVoice](https://github.com/metavoiceio/metavoice-src) as its voice cloning backbone.
+ """
+
+ with gr.Blocks(title="EmoKnob Demo") as demo:
+     gr.Markdown(title)
+     gr.Image("emo-knob-teaser-1.svg", show_label=False, container=False)
+
+     with gr.Row():
+         gr.Markdown(description)
+
+     with gr.Row():
+         with gr.Column():
+             to_say = gr.TextArea(
+                 label=f"What should I say!? (max {MAX_CHARS} characters).",
+                 lines=4,
+                 value="To be or not to be, that is the question.",
+             )
+
+             with gr.Row(), gr.Column():
+                 # voice settings
+                 top_p = gr.Slider(
+                     value=0.95,
+                     minimum=0.0,
+                     maximum=10.0,
+                     step=1.0,
+                     label="Speech Stability - improves text following for a challenging speaker",
+                 )
+                 guidance = gr.Slider(
+                     value=3.0,
+                     minimum=1.0,
+                     maximum=5.0,
+                     step=1.0,
+                     label="Speaker similarity - how closely to match speaker identity and speech style.",
+                 )
+
+                 strength = gr.Slider(
+                     value=0.1,
+                     minimum=0.0,
+                     maximum=5.0,
+                     step=0.01,
+                     label="Strength - how strong the emotion is. Setting it to too large a value may result in unstable output.",
+                 )
+
+                 # voice select
+                 toggle = gr.Radio(choices=RADIO_CHOICES, label="Choose voice", value=RADIO_CHOICES[0])
+
+             with gr.Row(visible=True) as row_1:
+                 preset_dropdown = gr.Dropdown(
+                     PRESET_VOICES.keys(), label="Preset voices", value=list(PRESET_VOICES.keys())[0]
+                 )
+                 with gr.Accordion("Preview: Preset voices", open=False):
+                     for label, path in PRESET_VOICES.items():
+                         gr.Audio(value=path, label=label)
+
+             with gr.Row(visible=False) as row_2:
+                 upload_target = gr.Audio(
+                     sources=["upload"],
+                     type="filepath",
+                     label="Upload a clean sample to clone.",
+                 )
+             with gr.Row():
+                 emotion_name = gr.Radio(choices=EMO_NAMES, label="Emotion", value=EMO_NAMES[0])
+             with gr.Row(visible=True) as row_3:
+                 upload_neutral = gr.Audio(
+                     sources=["upload"],
+                     type="filepath",
+                     label="Upload a neutral sample to compute the emotion direction. Should be the same speaker as the emotional sample.",
+                 )
+
+                 upload_emo = gr.Audio(
+                     sources=["upload"],
+                     type="filepath",
+                     label="Upload an emotional sample to compute the emotion direction. Should be the same speaker as the neutral sample.",
+                 )
+
+             toggle.change(
+                 change_voice_selection_layout,
+                 inputs=toggle,
+                 outputs=[row_1, row_2],
+             )
+
+             # emotion_name.change(
+             #     change_emotion_selection_layout,
+             #     inputs=emotion_name,
+             #     outputs=[row_3],
+             # )
+
+         with gr.Column():
+             speech = gr.Audio(
+                 type="filepath",
+                 label="Model says...",
+             )
+
+             submit = gr.Button("Generate Speech")
+             submit.click(
+                 fn=generate_sample,
+                 inputs=[to_say, emotion_name, upload_target, upload_emo, upload_neutral, strength, top_p, guidance, preset_dropdown, toggle],
+                 outputs=speech,
+             )
+
+ demo.launch()
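The core of generate_sample above is plain vector arithmetic on speaker embeddings: subtract a neutral embedding from an emotional one (same speaker), normalize to a unit direction, then nudge the source voice along that direction. A stripped-down sketch with random tensors standing in for real embeddings (the embedding dimension here is illustrative, not taken from the model):

    import torch

    # Stand-ins for get_cached_embedding(...) outputs.
    source_emb = torch.randn(256)
    emo_emb = torch.randn(256)
    neutral_emb = torch.randn(256)

    # Emotion direction: difference of same-speaker embeddings, unit-normalized.
    emo_dir = emo_emb - neutral_emb
    emo_dir = emo_dir / torch.norm(emo_dir, p=2)

    # The "knob": strength scales how far the cloned voice moves along the direction.
    strength = 0.1
    edited_emb = source_emb + strength * emo_dir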
assets/favicon.ico ADDED
assets/logo.png ADDED
cache/.locks/models--Salesforce--SFR-Embedding-Mistral/42dcdfcaf9e42a488d4be06500dd771d7aa11e83.lock ADDED
File without changes
cache/.locks/models--Salesforce--SFR-Embedding-Mistral/afbfcebcf9df8c0af538cd5b6f616bd1d7a9739eba4b81d871545b1b562d6b0a.lock ADDED
File without changes
cache/.locks/models--Salesforce--SFR-Embedding-Mistral/c19160bba3c1267f959caf6d13fb07f9ea232e04.lock ADDED
File without changes
cache/.locks/models--Salesforce--SFR-Embedding-Mistral/ef62bf21fb2396937098b86ae80c68813b229c18.lock ADDED
File without changes
cache/.locks/models--Salesforce--SFR-Embedding-Mistral/f7640f94e81bb7f4f04daf1668850b38763a13d9.lock ADDED
File without changes
cache/.locks/models--Salesforce--SFR-Embedding-Mistral/f8194e4e9432d287bf257d4a7d4a0f2446c32da8.lock ADDED
File without changes
cache/.locks/models--Salesforce--SFR-Embedding-Mistral/feb95adc7e79e878999ba5a1d3ddfe9f16eff0f1.lock ADDED
File without changes
cache/models--Salesforce--SFR-Embedding-Mistral/.no_exist/938c560d1c236aa563b2dbdf084f28ab28bccb11/model.safetensors ADDED
File without changes
cache/models--Salesforce--SFR-Embedding-Mistral/blobs/42dcdfcaf9e42a488d4be06500dd771d7aa11e83 ADDED
@@ -0,0 +1,4 @@
+ {
+     "max_seq_length": 4096,
+     "do_lower_case": false
+ }
cache/models--Salesforce--SFR-Embedding-Mistral/blobs/c19160bba3c1267f959caf6d13fb07f9ea232e04 ADDED
@@ -0,0 +1,27 @@
+ {
+   "_name_or_path": "intfloat/e5-mistral-7b-instruct",
+   "architectures": [
+     "MistralModel"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 14336,
+   "max_position_embeddings": 32768,
+   "model_type": "mistral",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 8,
+   "pad_token_id": 2,
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 10000.0,
+   "sliding_window": 4096,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.37.0",
+   "use_cache": false,
+   "vocab_size": 32000
+ }
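This cached config.json identifies the embedding model's backbone (intfloat/e5-mistral-7b-instruct) and its dimensions. A quick sketch of reading the relevant fields, assuming a local copy of the file:

    import json

    with open("config.json") as f:
        cfg = json.load(f)

    # 32 layers of width 4096, with grouped-query attention (32 query heads, 8 KV heads).
    print(cfg["num_hidden_layers"], cfg["hidden_size"],
          cfg["num_attention_heads"], cfg["num_key_value_heads"])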
cache/models--Salesforce--SFR-Embedding-Mistral/blobs/ef62bf21fb2396937098b86ae80c68813b229c18 ADDED
@@ -0,0 +1,7 @@
+ {
+   "__version__": {
+     "sentence_transformers": "2.2.2",
+     "transformers": "4.37.2",
+     "pytorch": "2.1.0+cu121"
+   }
+ }
cache/models--Salesforce--SFR-Embedding-Mistral/blobs/f7640f94e81bb7f4f04daf1668850b38763a13d9 ADDED
@@ -0,0 +1,14 @@
+ [
+   {
+     "idx": 0,
+     "name": "0",
+     "path": "",
+     "type": "sentence_transformers.models.Transformer"
+   },
+   {
+     "idx": 1,
+     "name": "1",
+     "path": "1_Pooling",
+     "type": "sentence_transformers.models.Pooling"
+   }
+ ]
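modules.json tells sentence-transformers how to assemble the model: a Transformer module followed by a Pooling module (configured under 1_Pooling). A sketch of loading and encoding with it; this triggers a full multi-gigabyte download, so it is illustrative only:

    from sentence_transformers import SentenceTransformer

    # Loading by name reads modules.json and chains Transformer -> Pooling.
    model = SentenceTransformer("Salesforce/SFR-Embedding-Mistral")
    embeddings = model.encode(["hello world"])  # one 4096-dim vector per input
    print(embeddings.shape)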
cache/models--Salesforce--SFR-Embedding-Mistral/blobs/f8194e4e9432d287bf257d4a7d4a0f2446c32da8 ADDED
@@ -0,0 +1,297 @@
+ {
+   "metadata": {
+     "total_size": 14221320192
+   },
+   "weight_map": {
+     "embed_tokens.weight": "model-00001-of-00003.safetensors",
+     "layers.0.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.0.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.0.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.0.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.0.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.1.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.1.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.1.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.10.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.10.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.10.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.10.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.10.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.10.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.10.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.10.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.10.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.11.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.11.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.11.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.11.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.11.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.11.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.11.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.11.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.11.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.12.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.12.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.12.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.12.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.12.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.12.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.12.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.12.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.12.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.13.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.13.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.13.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.13.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.13.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.13.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.13.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.13.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.13.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.14.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.14.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.14.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.14.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.14.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.14.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.14.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.14.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.14.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.15.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.15.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.15.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.15.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.15.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.15.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.15.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.15.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.15.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.16.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.16.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.16.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.16.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.16.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.16.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.16.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.16.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.16.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.17.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.17.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.17.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.17.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.17.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.17.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.17.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.17.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.17.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.18.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.18.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.18.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.18.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.18.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.18.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.18.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.18.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.18.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.19.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.19.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.19.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.19.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.19.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.19.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.19.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.19.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.19.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.2.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.2.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.2.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.20.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.20.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.20.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.20.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.20.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.20.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.20.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.20.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.20.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.21.input_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.21.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.21.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.21.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.21.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+     "layers.21.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.21.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.21.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.21.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.22.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.22.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.22.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.22.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.22.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.22.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.22.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.22.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.22.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+     "layers.23.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.23.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.23.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.23.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.23.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.23.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.23.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.23.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.23.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.24.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.24.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.24.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.24.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.24.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.24.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.24.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.24.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.24.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.25.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.25.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.25.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.25.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.25.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.25.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.25.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.25.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.25.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.26.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.26.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.26.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.26.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.26.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.26.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.26.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.26.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.26.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.27.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.27.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.27.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.27.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.27.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.27.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.27.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.27.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.27.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.28.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.28.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.28.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.28.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.28.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.28.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.28.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.28.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.28.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.29.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.29.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.29.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.29.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.29.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.29.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.29.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.29.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.29.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.3.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.3.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.3.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.30.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.30.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.30.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.30.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.30.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.30.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.30.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.30.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.30.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.31.input_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.31.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.31.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.31.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.31.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+     "layers.31.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.31.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.31.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.31.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+     "layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.4.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.4.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.4.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.5.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.5.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.5.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.5.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.5.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.5.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.5.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.5.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.5.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.6.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.6.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.6.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.6.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.6.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.6.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.6.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.6.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.6.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.7.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.7.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.7.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.7.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.7.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.7.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.7.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.7.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.7.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.8.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.8.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.8.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.8.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.8.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.8.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.8.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.8.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.8.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.9.input_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.9.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.9.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.9.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.9.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+     "layers.9.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.9.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.9.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+     "layers.9.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+     "norm.weight": "model-00003-of-00003.safetensors"
+   }
+ }
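The index above is how sharded safetensors checkpoints are resolved: metadata.total_size gives the byte count across shards, and weight_map maps each tensor name to the shard file that stores it. A small sketch of looking a tensor up, assuming a local copy of the index:

    import json

    with open("model.safetensors.index.json") as f:
        index = json.load(f)

    print(index["metadata"]["total_size"])  # total weight bytes across all shards
    # Which shard stores a given tensor:
    print(index["weight_map"]["layers.0.mlp.up_proj.weight"])  # model-00001-of-00003.safetensors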
cache/models--Salesforce--SFR-Embedding-Mistral/blobs/feb95adc7e79e878999ba5a1d3ddfe9f16eff0f1 ADDED
@@ -0,0 +1,3398 @@
+ ---
+ tags:
+ - mteb
+ - sentence-transformers
+ - transformers
+ model-index:
+ - name: SFR-Embedding-Mistral
+   results:
+   - task:
+       type: Classification
+     dataset:
+       type: mteb/amazon_counterfactual
+       name: MTEB AmazonCounterfactualClassification (en)
+       config: en
+       split: test
+       revision: e8379541af4e31359cca9fbcf4b00f2671dba205
+     metrics:
+     - type: accuracy
+       value: 77.92537313432834
+     - type: ap
+       value: 40.86767661556651
+     - type: f1
+       value: 71.65758897929837
+   - task:
+       type: Classification
+     dataset:
+       type: mteb/amazon_polarity
+       name: MTEB AmazonPolarityClassification
+       config: default
+       split: test
+       revision: e2d317d38cd51312af73b3d32a06d1a08b442046
+     metrics:
+     - type: accuracy
+       value: 95.967
+     - type: ap
+       value: 94.46300829592593
+     - type: f1
+       value: 95.96507173189292
+   - task:
+       type: Classification
+     dataset:
+       type: mteb/amazon_reviews_multi
+       name: MTEB AmazonReviewsClassification (en)
+       config: en
+       split: test
+       revision: 1399c76144fd37290681b995c656ef9b2e06e26d
+     metrics:
+     - type: accuracy
+       value: 54.352000000000004
+     - type: f1
+       value: 53.636682615380174
+   - task:
+       type: Retrieval
+     dataset:
+       type: arguana
+       name: MTEB ArguAna
+       config: default
+       split: test
+       revision: None
+     metrics:
+     - type: ndcg_at_1
+       value: 43.314
+     - type: ndcg_at_2
+       value: 54.757
+     - type: ndcg_at_3
+       value: 58.84700000000001
+     - type: ndcg_at_5
+       value: 63.634
+     - type: ndcg_at_7
+       value: 65.741
+     - type: ndcg_at_10
+       value: 67.171
+     - type: ndcg_at_20
+       value: 68.585
+     - type: ndcg_at_30
+       value: 68.81
+     - type: ndcg_at_50
+       value: 68.932
+     - type: ndcg_at_70
+       value: 68.992
+     - type: ndcg_at_100
+       value: 69.014
+     - type: ndcg_at_200
+       value: 69.014
+     - type: ndcg_at_300
+       value: 69.014
+     - type: ndcg_at_500
+       value: 69.014
+     - type: ndcg_at_700
+       value: 69.014
+     - type: ndcg_at_1000
+       value: 69.014
+     - type: map_at_1
+       value: 43.314
+     - type: map_at_2
+       value: 52.383
+     - type: map_at_3
+       value: 55.108999999999995
+     - type: map_at_5
+       value: 57.772999999999996
+     - type: map_at_7
+       value: 58.718
+     - type: map_at_10
+       value: 59.256
+     - type: map_at_20
+       value: 59.668
+     - type: map_at_30
+       value: 59.709999999999994
+     - type: map_at_50
+       value: 59.727
+     - type: map_at_70
+       value: 59.733999999999995
+     - type: map_at_100
+       value: 59.73500000000001
+     - type: map_at_200
+       value: 59.73500000000001
+     - type: map_at_300
+       value: 59.73500000000001
+     - type: map_at_500
+       value: 59.73500000000001
+     - type: map_at_700
+       value: 59.73500000000001
+     - type: map_at_1000
+       value: 59.73500000000001
+     - type: recall_at_1
+       value: 43.314
+     - type: recall_at_2
+       value: 61.451
+     - type: recall_at_3
+       value: 69.63000000000001
+     - type: recall_at_5
+       value: 81.223
+     - type: recall_at_7
+       value: 87.33999999999999
+     - type: recall_at_10
+       value: 92.034
+     - type: recall_at_20
+       value: 97.44
+     - type: recall_at_30
+       value: 98.506
+     - type: recall_at_50
+       value: 99.14699999999999
+     - type: recall_at_70
+       value: 99.502
+     - type: recall_at_100
+       value: 99.644
+     - type: recall_at_200
+       value: 99.644
+     - type: recall_at_300
+       value: 99.644
+     - type: recall_at_500
+       value: 99.644
+     - type: recall_at_700
+       value: 99.644
+     - type: recall_at_1000
+       value: 99.644
+     - type: precision_at_1
+       value: 43.314
+     - type: precision_at_2
+       value: 30.725
+     - type: precision_at_3
+       value: 23.21
+     - type: precision_at_5
+       value: 16.245
+     - type: precision_at_7
+       value: 12.477
+     - type: precision_at_10
+       value: 9.203
+     - type: precision_at_20
+       value: 4.872
+     - type: precision_at_30
+       value: 3.2840000000000003
+     - type: precision_at_50
+       value: 1.983
+     - type: precision_at_70
+       value: 1.421
+     - type: precision_at_100
+       value: 0.996
+     - type: precision_at_200
+       value: 0.498
+     - type: precision_at_300
+       value: 0.332
+     - type: precision_at_500
+       value: 0.199
+     - type: precision_at_700
+       value: 0.14200000000000002
+     - type: precision_at_1000
+       value: 0.1
+     - type: mrr_at_1
+       value: 44.666
+     - type: mrr_at_2
+       value: 52.418
+     - type: mrr_at_3
+       value: 55.595000000000006
+     - type: mrr_at_5
+       value: 58.205
+     - type: mrr_at_7
+       value: 59.202999999999996
+     - type: mrr_at_10
+       value: 59.727
+     - type: mrr_at_20
+       value: 60.133
+     - type: mrr_at_30
+       value: 60.178
+     - type: mrr_at_50
+       value: 60.192
+     - type: mrr_at_70
+       value: 60.19799999999999
+     - type: mrr_at_100
+       value: 60.199999999999996
+     - type: mrr_at_200
+       value: 60.199999999999996
+     - type: mrr_at_300
+       value: 60.199999999999996
+     - type: mrr_at_500
+       value: 60.199999999999996
+     - type: mrr_at_700
+       value: 60.199999999999996
+     - type: mrr_at_1000
+       value: 60.199999999999996
+   - task:
+       type: Clustering
+     dataset:
+       type: mteb/arxiv-clustering-p2p
+       name: MTEB ArxivClusteringP2P
+       config: default
+       split: test
+       revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d
+     metrics:
+     - type: v_measure
+       value: 52.07508593014336
+   - task:
+       type: Clustering
+     dataset:
+       type: mteb/arxiv-clustering-s2s
+       name: MTEB ArxivClusteringS2S
+       config: default
+       split: test
+       revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53
+     metrics:
+     - type: v_measure
+       value: 47.381339333240675
+   - task:
+       type: Reranking
+     dataset:
+       type: mteb/askubuntudupquestions-reranking
+       name: MTEB AskUbuntuDupQuestions
+       config: default
+       split: test
+       revision: 2000358ca161889fa9c082cb41daa8dcfb161a54
+     metrics:
+     - type: map
+       value: 67.58376647859171
+     - type: mrr
+       value: 80.56885635140483
+   - task:
+       type: STS
+     dataset:
+       type: mteb/biosses-sts
+       name: MTEB BIOSSES
+       config: default
+       split: test
+       revision: d3fb88f8f02e40887cd149695127462bbcf29b4a
+     metrics:
+     - type: cos_sim_pearson
+       value: 88.40107280274783
+     - type: cos_sim_spearman
+       value: 86.07003345325681
+     - type: euclidean_pearson
+       value: 87.1726034325395
+     - type: euclidean_spearman
+       value: 86.07003345325681
+     - type: manhattan_pearson
+       value: 87.25660625029772
+     - type: manhattan_spearman
+       value: 86.3808839096893
+   - task:
+       type: Classification
+     dataset:
+       type: mteb/banking77
+       name: MTEB Banking77Classification
+       config: default
+       split: test
+       revision: 0fd18e25b25c072e09e0d92ab615fda904d66300
+     metrics:
+     - type: accuracy
+       value: 88.81168831168831
+     - type: f1
+       value: 88.76514496560141
+   - task:
+       type: Clustering
+     dataset:
+       type: mteb/biorxiv-clustering-p2p
+       name: MTEB BiorxivClusteringP2P
+       config: default
+       split: test
+       revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40
+     metrics:
+     - type: v_measure
+       value: 43.9382520874344
+   - task:
+       type: Clustering
+     dataset:
+       type: mteb/biorxiv-clustering-s2s
+       name: MTEB BiorxivClusteringS2S
+       config: default
+       split: test
+       revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908
+     metrics:
+     - type: v_measure
+       value: 41.14351847240913
+   - task:
+       type: Retrieval
+     dataset:
+       type: BeIR/cqadupstack
+       name: MTEB CQADupstackRetrieval
+       config: default
+       split: test
+       revision: None
+     metrics:
+     - type: ndcg_at_1
+       value: 34.51166666666667
+     - type: ndcg_at_2
+       value: 38.51591666666667
+     - type: ndcg_at_3
+       value: 40.95083333333333
+     - type: ndcg_at_5
+       value: 43.580666666666666
+     - type: ndcg_at_7
+       value: 45.0625
+     - type: ndcg_at_10
+       value: 46.49083333333333
+     - type: ndcg_at_20
+       value: 48.731333333333325
+     - type: ndcg_at_30
+       value: 49.78666666666667
+     - type: ndcg_at_50
+       value: 50.84049999999999
+     - type: ndcg_at_70
+       value: 51.393750000000004
+     - type: ndcg_at_100
+       value: 51.883333333333326
+     - type: ndcg_at_200
+       value: 52.65225
+     - type: ndcg_at_300
+       value: 52.98241666666669
+     - type: ndcg_at_500
+       value: 53.28541666666668
+     - type: ndcg_at_700
+       value: 53.49241666666668
+     - type: ndcg_at_1000
+       value: 53.63758333333334
+     - type: map_at_1
+       value: 29.10075
+     - type: map_at_2
+       value: 34.636500000000005
+     - type: map_at_3
+       value: 36.92033333333333
+     - type: map_at_5
+       value: 38.81641666666666
+     - type: map_at_7
+       value: 39.635416666666664
+     - type: map_at_10
+       value: 40.294583333333335
+     - type: map_at_20
+       value: 41.07574999999999
+     - type: map_at_30
+       value: 41.333
+     - type: map_at_50
+       value: 41.529333333333334
+     - type: map_at_70
+       value: 41.606833333333334
+     - type: map_at_100
+       value: 41.66224999999999
+     - type: map_at_200
+       value: 41.72691666666666
+     - type: map_at_300
+       value: 41.746583333333334
+     - type: map_at_500
+       value: 41.75983333333333
+     - type: map_at_700
+       value: 41.76558333333333
+     - type: map_at_1000
+       value: 41.769000000000005
+     - type: recall_at_1
+       value: 29.10075
+     - type: recall_at_2
+       value: 39.07658333333333
+     - type: recall_at_3
+       value: 44.93591666666667
+     - type: recall_at_5
+       value: 51.66883333333333
+     - type: recall_at_7
+       value: 55.881000000000014
+     - type: recall_at_10
+       value: 60.34691666666667
+     - type: recall_at_20
+       value: 68.44016666666667
+     - type: recall_at_30
+       value: 72.90766666666667
+     - type: recall_at_50
+       value: 77.843
+     - type: recall_at_70
+       value: 80.70366666666668
+     - type: recall_at_100
+       value: 83.42866666666667
+     - type: recall_at_200
+       value: 88.06816666666668
+     - type: recall_at_300
+       value: 90.249
+     - type: recall_at_500
+       value: 92.37616666666668
+     - type: recall_at_700
+       value: 93.978
+     - type: recall_at_1000
+       value: 95.12791666666666
+     - type: precision_at_1
+       value: 34.51166666666667
+     - type: precision_at_2
+       value: 24.326333333333327
+     - type: precision_at_3
+       value: 19.099249999999998
+     - type: precision_at_5
+       value: 13.672666666666666
+     - type: precision_at_7
+       value: 10.772
+     - type: precision_at_10
+       value: 8.302166666666668
+     - type: precision_at_20
+       value: 4.8960833333333325
+     - type: precision_at_30
+       value: 3.551083333333333
+     - type: precision_at_50
+       value: 2.3386666666666662
+     - type: precision_at_70
+       value: 1.7605833333333334
+     - type: precision_at_100
+       value: 1.2965
+     - type: precision_at_200
+       value: 0.7106666666666668
+     - type: precision_at_300
+       value: 0.4955
+     - type: precision_at_500
+       value: 0.3106666666666667
+     - type: precision_at_700
+       value: 0.22791666666666668
+     - type: precision_at_1000
+       value: 0.1635833333333333
+     - type: mrr_at_1
+       value: 34.51166666666667
+     - type: mrr_at_2
+       value: 39.954249999999995
+     - type: mrr_at_3
+       value: 41.93741666666668
+     - type: mrr_at_5
+       value: 43.487166666666674
+     - type: mrr_at_7
+       value: 44.14983333333333
+     - type: mrr_at_10
+       value: 44.62766666666666
+     - type: mrr_at_20
+       value: 45.15291666666668
+     - type: mrr_at_30
+       value: 45.317
+     - type: mrr_at_50
+       value: 45.42875
+     - type: mrr_at_70
+       value: 45.46966666666667
+     - type: mrr_at_100
+       value: 45.49716666666667
+     - type: mrr_at_200
+       value: 45.525166666666664
+     - type: mrr_at_300
+       value: 45.53233333333335
+     - type: mrr_at_500
+       value: 45.5365
+     - type: mrr_at_700
+       value: 45.538583333333335
+     - type: mrr_at_1000
+       value: 45.539583333333326
+   - task:
+       type: Retrieval
+     dataset:
+       type: climate-fever
+       name: MTEB ClimateFEVER
+       config: default
+       split: test
+       revision: None
+     metrics:
+     - type: ndcg_at_1
+       value: 35.179
+     - type: ndcg_at_2
+       value: 31.243
+     - type: ndcg_at_3
+       value: 30.562
+     - type: ndcg_at_5
+       value: 32.409
+     - type: ndcg_at_7
+       value: 34.525
+     - type: ndcg_at_10
+       value: 36.415
+     - type: ndcg_at_20
+       value: 39.443
+     - type: ndcg_at_30
+       value: 40.796
+     - type: ndcg_at_50
+       value: 42.16
+     - type: ndcg_at_70
+       value: 42.971
+     - type: ndcg_at_100
+       value: 43.691
+     - type: ndcg_at_200
+       value: 45.004
+     - type: ndcg_at_300
+       value: 45.527
+     - type: ndcg_at_500
+       value: 46.072
+     - type: ndcg_at_700
+       value: 46.387
+     - type: ndcg_at_1000
+       value: 46.663
+     - type: map_at_1
+       value: 15.692
+     - type: map_at_2
+       value: 20.116
+     - type: map_at_3
+       value: 22.6
+     - type: map_at_5
+       value: 24.701
+     - type: map_at_7
+       value: 25.934
+     - type: map_at_10
+       value: 26.843
+     - type: map_at_20
+       value: 27.975
+     - type: map_at_30
+       value: 28.372000000000003
+     - type: map_at_50
+       value: 28.671000000000003
+     - type: map_at_70
+       value: 28.803
+     - type: map_at_100
+       value: 28.895
+     - type: map_at_200
+       value: 29.011
+     - type: map_at_300
+       value: 29.042
+     - type: map_at_500
+       value: 29.065
+     - type: map_at_700
+       value: 29.075
+     - type: map_at_1000
+       value: 29.081000000000003
+     - type: recall_at_1
+       value: 15.692
+     - type: recall_at_2
+       value: 22.602
+     - type: recall_at_3
+       value: 27.814
+     - type: recall_at_5
+       value: 33.756
+     - type: recall_at_7
+       value: 38.073
+     - type: recall_at_10
+       value: 42.553000000000004
+     - type: recall_at_20
+       value: 51.121
+     - type: recall_at_30
+       value: 55.523999999999994
+     - type: recall_at_50
+       value: 60.586
+     - type: recall_at_70
+       value: 63.94
+     - type: recall_at_100
+       value: 67.134
+     - type: recall_at_200
+       value: 73.543
+     - type: recall_at_300
+       value: 76.372
+     - type: recall_at_500
+       value: 79.60199999999999
+     - type: recall_at_700
+       value: 81.536
+     - type: recall_at_1000
+       value: 83.37400000000001
+     - type: precision_at_1
+       value: 35.179
+     - type: precision_at_2
+       value: 27.199
+     - type: precision_at_3
+       value: 22.953000000000003
+     - type: precision_at_5
+       value: 17.224999999999998
+     - type: precision_at_7
+       value: 14.238999999999999
+     - type: precision_at_10
+       value: 11.303
+     - type: precision_at_20
+       value: 6.954000000000001
+     - type: precision_at_30
+       value: 5.116
+     - type: precision_at_50
+       value: 3.395
+     - type: precision_at_70
+       value: 2.579
+     - type: precision_at_100
+       value: 1.9109999999999998
+     - type: precision_at_200
+       value: 1.065
+     - type: precision_at_300
+       value: 0.743
+     - type: precision_at_500
+       value: 0.46699999999999997
+     - type: precision_at_700
+       value: 0.344
+     - type: precision_at_1000
+       value: 0.247
+     - type: mrr_at_1
+       value: 35.179
+     - type: mrr_at_2
+       value: 41.792
+     - type: mrr_at_3
+       value: 44.484
+     - type: mrr_at_5
+       value: 46.39
+     - type: mrr_at_7
+       value: 47.125
+     - type: mrr_at_10
+       value: 47.711999999999996
+     - type: mrr_at_20
+       value: 48.214
+     - type: mrr_at_30
+       value: 48.325
+     - type: mrr_at_50
+       value: 48.392
+     - type: mrr_at_70
+       value: 48.418
+     - type: mrr_at_100
+       value: 48.44
+     - type: mrr_at_200
+       value: 48.46
+     - type: mrr_at_300
+       value: 48.461999999999996
+     - type: mrr_at_500
+       value: 48.466
+     - type: mrr_at_700
+       value: 48.466
+     - type: mrr_at_1000
+       value: 48.467
+   - task:
+       type: Retrieval
+     dataset:
+       type: dbpedia-entity
+       name: MTEB DBPedia
+       config: default
+       split: test
+       revision: None
+     metrics:
+     - type: ndcg_at_1
+       value: 62.375
+     - type: ndcg_at_2
+       value: 56.286
+     - type: ndcg_at_3
+       value: 53.665
+     - type: ndcg_at_5
+       value: 51.139
+     - type: ndcg_at_7
+       value: 49.873
+     - type: ndcg_at_10
+       value: 49.056
+     - type: ndcg_at_20
+       value: 48.783
+     - type: ndcg_at_30
+       value: 49.166
+     - type: ndcg_at_50
+       value: 51.141999999999996
+     - type: ndcg_at_70
+       value: 52.774
+     - type: ndcg_at_100
+       value: 54.403
+     - type: ndcg_at_200
+       value: 57.419
+     - type: ndcg_at_300
+       value: 58.778
+     - type: ndcg_at_500
+       value: 60.228
+     - type: ndcg_at_700
+       value: 61.07599999999999
+     - type: ndcg_at_1000
+       value: 61.846000000000004
+     - type: map_at_1
+       value: 10.359
+     - type: map_at_2
+       value: 14.446
+     - type: map_at_3
+       value: 16.689
+     - type: map_at_5
+       value: 20.096
+     - type: map_at_7
+       value: 22.247
+     - type: map_at_10
+       value: 24.468999999999998
+     - type: map_at_20
+       value: 28.938000000000002
+     - type: map_at_30
+       value: 31.134
+     - type: map_at_50
+       value: 33.403
+     - type: map_at_70
+       value: 34.486
+     - type: map_at_100
+       value: 35.337
+     - type: map_at_200
+       value: 36.364999999999995
+     - type: map_at_300
+       value: 36.735
+     - type: map_at_500
+       value: 37.057
+     - type: map_at_700
+       value: 37.225
+     - type: map_at_1000
+       value: 37.379
+     - type: recall_at_1
+       value: 10.359
+     - type: recall_at_2
+       value: 14.945
+     - type: recall_at_3
+       value: 17.694
+     - type: recall_at_5
+       value: 22.677
+     - type: recall_at_7
+       value: 26.131
+     - type: recall_at_10
+       value: 30.053
+     - type: recall_at_20
+       value: 39.518
+     - type: recall_at_30
+       value: 44.925
+     - type: recall_at_50
+       value: 52.154
+     - type: recall_at_70
+       value: 56.729
+     - type: recall_at_100
+       value: 61.18900000000001
+     - type: recall_at_200
+       value: 70.407
+     - type: recall_at_300
+       value: 74.412
+     - type: recall_at_500
+       value: 78.891
+     - type: recall_at_700
+       value: 81.74
+     - type: recall_at_1000
+       value: 84.253
+     - type: precision_at_1
+       value: 75
+     - type: precision_at_2
+       value: 64.125
+     - type: precision_at_3
+       value: 57.833
+     - type: precision_at_5
+       value: 50.24999999999999
+     - type: precision_at_7
+       value: 44.75
+     - type: precision_at_10
+       value: 39.75
+     - type: precision_at_20
+       value: 30.412
+     - type: precision_at_30
+       value: 25.141999999999996
+     - type: precision_at_50
+       value: 19.2
+     - type: precision_at_70
+       value: 15.729000000000001
+     - type: precision_at_100
+       value: 12.552
+     - type: precision_at_200
+       value: 7.866
+     - type: precision_at_300
+       value: 5.9270000000000005
+     - type: precision_at_500
+       value: 4.1129999999999995
+     - type: precision_at_700
+       value: 3.2460000000000004
+     - type: precision_at_1000
+       value: 2.5260000000000002
+     - type: mrr_at_1
+       value: 75
+     - type: mrr_at_2
+       value: 78.625
+     - type: mrr_at_3
+       value: 79.708
+     - type: mrr_at_5
+       value: 80.446
+     - type: mrr_at_7
+       value: 80.862
+     - type: mrr_at_10
+       value: 81.161
+     - type: mrr_at_20
+       value: 81.3
+     - type: mrr_at_30
+       value: 81.348
+     - type: mrr_at_50
+       value: 81.361
+     - type: mrr_at_70
+       value: 81.361
+     - type: mrr_at_100
+       value: 81.361
+     - type: mrr_at_200
+       value: 81.367
+     - type: mrr_at_300
+       value: 81.367
+     - type: mrr_at_500
+       value: 81.368
+     - type: mrr_at_700
+       value: 81.368
+     - type: mrr_at_1000
+       value: 81.368
+   - task:
+       type: Classification
+     dataset:
+       type: mteb/emotion
+       name: MTEB EmotionClassification
+       config: default
+       split: test
+       revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37
+     metrics:
+     - type: accuracy
+       value: 50.239999999999995
+     - type: f1
+       value: 46.42361822342044
+   - task:
+       type: Retrieval
+     dataset:
+       type: fever
+       name: MTEB FEVER
+       config: default
+       split: test
+       revision: None
+     metrics:
+     - type: ndcg_at_1
+       value: 83.723
+     - type: ndcg_at_2
+       value: 86.777
+     - type: ndcg_at_3
+       value: 87.997
+     - type: ndcg_at_5
+       value: 88.864
+     - type: ndcg_at_7
+       value: 89.143
+     - type: ndcg_at_10
+       value: 89.349
+     - type: ndcg_at_20
+       value: 89.709
+     - type: ndcg_at_30
+       value: 89.82900000000001
+     - type: ndcg_at_50
+       value: 89.923
+     - type: ndcg_at_70
+       value: 89.982
+     - type: ndcg_at_100
+       value: 90.026
+     - type: ndcg_at_200
+       value: 90.10000000000001
+     - type: ndcg_at_300
+       value: 90.12599999999999
+     - type: ndcg_at_500
+       value: 90.17399999999999
+     - type: ndcg_at_700
+       value: 90.19
+     - type: ndcg_at_1000
+       value: 90.208
+     - type: map_at_1
+       value: 77.64999999999999
+     - type: map_at_2
+       value: 83.769
+     - type: map_at_3
+       value: 85.041
+     - type: map_at_5
+       value: 85.736
+     - type: map_at_7
+       value: 85.924
+     - type: map_at_10
+       value: 86.032
+     - type: map_at_20
+       value: 86.177
+     - type: map_at_30
+       value: 86.213
+     - type: map_at_50
+       value: 86.233
+     - type: map_at_70
+       value: 86.24300000000001
+     - type: map_at_100
+       value: 86.249
+     - type: map_at_200
+       value: 86.256
+     - type: map_at_300
+       value: 86.258
+     - type: map_at_500
+       value: 86.26
+     - type: map_at_700
+       value: 86.26
+     - type: map_at_1000
+       value: 86.261
+     - type: recall_at_1
+       value: 77.64999999999999
+     - type: recall_at_2
+       value: 88.53999999999999
+     - type: recall_at_3
+       value: 91.696
+     - type: recall_at_5
+       value: 93.916
+     - type: recall_at_7
+       value: 94.731
+     - type: recall_at_10
+       value: 95.318
+     - type: recall_at_20
+       value: 96.507
+     - type: recall_at_30
+       value: 96.956
+     - type: recall_at_50
+       value: 97.34899999999999
+     - type: recall_at_70
+       value: 97.61
+     - type: recall_at_100
+       value: 97.83
+     - type: recall_at_200
+       value: 98.223
+     - type: recall_at_300
+       value: 98.374
+     - type: recall_at_500
+       value: 98.67899999999999
+     - type: recall_at_700
+       value: 98.787
+     - type: recall_at_1000
+       value: 98.919
+     - type: precision_at_1
+       value: 83.723
+     - type: precision_at_2
+       value: 48.425000000000004
+     - type: precision_at_3
+       value: 33.638
+     - type: precision_at_5
+       value: 20.843
+     - type: precision_at_7
+       value: 15.079
+     - type: precision_at_10
+       value: 10.674999999999999
+     - type: precision_at_20
+       value: 5.457999999999999
+     - type: precision_at_30
+       value: 3.6740000000000004
+     - type: precision_at_50
+       value: 2.2239999999999998
+     - type: precision_at_70
+       value: 1.599
+     - type: precision_at_100
+       value: 1.125
+     - type: precision_at_200
+       value: 0.5680000000000001
+     - type: precision_at_300
+       value: 0.38
+     - type: precision_at_500
+       value: 0.22999999999999998
+     - type: precision_at_700
+       value: 0.165
+     - type: precision_at_1000
+       value: 0.116
+     - type: mrr_at_1
+       value: 83.723
+     - type: mrr_at_2
+       value: 88.794
+     - type: mrr_at_3
+       value: 89.679
+     - type: mrr_at_5
+       value: 90.049
+     - type: mrr_at_7
+       value: 90.129
+     - type: mrr_at_10
+       value: 90.167
+     - type: mrr_at_20
+       value: 90.208
+     - type: mrr_at_30
+       value: 90.214
+     - type: mrr_at_50
+       value: 90.217
+     - type: mrr_at_70
+       value: 90.218
+     - type: mrr_at_100
+       value: 90.21900000000001
+     - type: mrr_at_200
+       value: 90.21900000000001
+     - type: mrr_at_300
+       value: 90.21900000000001
+     - type: mrr_at_500
+       value: 90.21900000000001
+     - type: mrr_at_700
+       value: 90.21900000000001
+     - type: mrr_at_1000
+       value: 90.21900000000001
+   - task:
+       type: Retrieval
+     dataset:
+       type: fiqa
+       name: MTEB FiQA2018
+       config: default
+       split: test
+       revision: None
+     metrics:
+     - type: ndcg_at_1
+       value: 59.721999999999994
+     - type: ndcg_at_2
+       value: 56.85
+     - type: ndcg_at_3
+       value: 56.462999999999994
+     - type: ndcg_at_5
+       value: 57.75599999999999
+     - type: ndcg_at_7
+       value: 59.109
+     - type: ndcg_at_10
+       value: 60.402
+     - type: ndcg_at_20
+       value: 63.071999999999996
+     - type: ndcg_at_30
+       value: 64.302
+     - type: ndcg_at_50
+       value: 65.619
+     - type: ndcg_at_70
+       value: 66.161
+     - type: ndcg_at_100
+       value: 66.645
+     - type: ndcg_at_200
+       value: 67.353
+     - type: ndcg_at_300
+       value: 67.646
+     - type: ndcg_at_500
+       value: 67.852
+     - type: ndcg_at_700
+       value: 67.974
+     - type: ndcg_at_1000
+       value: 68.084
+     - type: map_at_1
+       value: 31.56
+     - type: map_at_2
+       value: 42.093
+     - type: map_at_3
+       value: 46.177
+     - type: map_at_5
+       value: 49.78
+     - type: map_at_7
+       value: 51.410999999999994
+     - type: map_at_10
+       value: 52.524
+     - type: map_at_20
+       value: 53.815000000000005
+     - type: map_at_30
+       value: 54.201
+     - type: map_at_50
+       value: 54.531
+     - type: map_at_70
+       value: 54.625
+     - type: map_at_100
+       value: 54.686
+     - type: map_at_200
+       value: 54.757999999999996
+     - type: map_at_300
+       value: 54.776
+     - type: map_at_500
+       value: 54.786
+     - type: map_at_700
+       value: 54.790000000000006
+     - type: map_at_1000
+       value: 54.793000000000006
+     - type: recall_at_1
+       value: 31.56
+     - type: recall_at_2
+       value: 44.858
+     - type: recall_at_3
+       value: 51.11
+     - type: recall_at_5
+       value: 58.394
+     - type: recall_at_7
+       value: 63.001
+     - type: recall_at_10
+       value: 66.81200000000001
+     - type: recall_at_20
+       value: 74.901
+     - type: recall_at_30
+       value: 79.218
+     - type: recall_at_50
+       value: 84.49
+     - type: recall_at_70
+       value: 87.003
+     - type: recall_at_100
+       value: 89.345
+     - type: recall_at_200
+       value: 93.173
+     - type: recall_at_300
+       value: 94.906
+     - type: recall_at_500
+       value: 96.223
+     - type: recall_at_700
+       value: 97.043
+     - type: recall_at_1000
+       value: 97.785
+     - type: precision_at_1
+       value: 59.721999999999994
+     - type: precision_at_2
+       value: 46.682
+     - type: precision_at_3
+       value: 37.602999999999994
+     - type: precision_at_5
+       value: 27.500000000000004
+     - type: precision_at_7
+       value: 21.847
+     - type: precision_at_10
+       value: 16.667
+     - type: precision_at_20
+       value: 9.545
+     - type: precision_at_30
+       value: 6.795
+     - type: precision_at_50
+       value: 4.38
+     - type: precision_at_70
+       value: 3.221
+     - type: precision_at_100
+       value: 2.319
+     - type: precision_at_200
+       value: 1.2149999999999999
+     - type: precision_at_300
+       value: 0.827
+     - type: precision_at_500
+       value: 0.504
+     - type: precision_at_700
+       value: 0.364
+     - type: precision_at_1000
+       value: 0.257
+     - type: mrr_at_1
+       value: 59.721999999999994
+     - type: mrr_at_2
+       value: 64.506
+     - type: mrr_at_3
+       value: 65.792
+     - type: mrr_at_5
+       value: 66.965
+     - type: mrr_at_7
+       value: 67.34700000000001
+     - type: mrr_at_10
+       value: 67.57
+     - type: mrr_at_20
+       value: 67.896
+     - type: mrr_at_30
+       value: 68.008
+     - type: mrr_at_50
+       value: 68.083
+     - type: mrr_at_70
+       value: 68.105
+     - type: mrr_at_100
+       value: 68.116
+     - type: mrr_at_200
+       value: 68.12700000000001
+     - type: mrr_at_300
+       value: 68.13
+     - type: mrr_at_500
+       value: 68.132
+     - type: mrr_at_700
+       value: 68.133
+     - type: mrr_at_1000
+       value: 68.133
+   - task:
+       type: Retrieval
+     dataset:
+       type: hotpotqa
+       name: MTEB HotpotQA
+       config: default
+       split: test
+       revision: None
+     metrics:
+     - type: ndcg_at_1
+       value: 81.796
+     - type: ndcg_at_2
+       value: 67.999
+     - type: ndcg_at_3
+       value: 72.15599999999999
+     - type: ndcg_at_5
+       value: 74.99900000000001
+     - type: ndcg_at_7
+       value: 76.179
+     - type: ndcg_at_10
+       value: 77.022
+     - type: ndcg_at_20
+       value: 78.173
+     - type: ndcg_at_30
+       value: 78.648
+     - type: ndcg_at_50
+       value: 79.104
+     - type: ndcg_at_70
+       value: 79.335
+     - type: ndcg_at_100
+       value: 79.56
+     - type: ndcg_at_200
+       value: 79.911
+     - type: ndcg_at_300
+       value: 80.045
+     - type: ndcg_at_500
+       value: 80.19500000000001
+     - type: ndcg_at_700
+       value: 80.281
+     - type: ndcg_at_1000
+       value: 80.35
+     - type: map_at_1
+       value: 40.898
+     - type: map_at_2
+       value: 62.016000000000005
+     - type: map_at_3
+       value: 66.121
+     - type: map_at_5
+       value: 68.471
+     - type: map_at_7
+       value: 69.261
+     - type: map_at_10
+       value: 69.738
+     - type: map_at_20
+       value: 70.208
+     - type: map_at_30
+       value: 70.343
+     - type: map_at_50
+       value: 70.43700000000001
+     - type: map_at_70
+       value: 70.47099999999999
+     - type: map_at_100
+       value: 70.498
+     - type: map_at_200
+       value: 70.526
+     - type: map_at_300
+       value: 70.533
+     - type: map_at_500
+       value: 70.538
+     - type: map_at_700
+       value: 70.541
+     - type: map_at_1000
+       value: 70.542
+     - type: recall_at_1
+       value: 40.898
+     - type: recall_at_2
+       value: 63.964
+     - type: recall_at_3
+       value: 70.743
+     - type: recall_at_5
+       value: 76.36699999999999
+     - type: recall_at_7
+       value: 79.142
+     - type: recall_at_10
+       value: 81.404
+     - type: recall_at_20
+       value: 85.111
+     - type: recall_at_30
+       value: 86.92800000000001
+     - type: recall_at_50
+       value: 88.899
+     - type: recall_at_70
+       value: 90.01400000000001
+     - type: recall_at_100
+       value: 91.19500000000001
+     - type: recall_at_200
+       value: 93.234
+     - type: recall_at_300
+       value: 94.105
+     - type: recall_at_500
+       value: 95.159
+     - type: recall_at_700
+       value: 95.8
+     - type: recall_at_1000
+       value: 96.34700000000001
+     - type: precision_at_1
+       value: 81.796
+     - type: precision_at_2
+       value: 63.964
+     - type: precision_at_3
+       value: 47.162
+     - type: precision_at_5
+       value: 30.547
+     - type: precision_at_7
+       value: 22.612
+     - type: precision_at_10
+       value: 16.281000000000002
+     - type: precision_at_20
+       value: 8.511000000000001
+     - type: precision_at_30
+       value: 5.795
+     - type: precision_at_50
+       value: 3.556
+     - type: precision_at_70
+       value: 2.572
+     - type: precision_at_100
+       value: 1.8239999999999998
+     - type: precision_at_200
+       value: 0.932
+     - type: precision_at_300
+       value: 0.627
+     - type: precision_at_500
+       value: 0.381
+     - type: precision_at_700
+       value: 0.27399999999999997
+     - type: precision_at_1000
+       value: 0.193
+     - type: mrr_at_1
+       value: 81.796
+     - type: mrr_at_2
+       value: 85.69200000000001
+     - type: mrr_at_3
+       value: 86.52
+     - type: mrr_at_5
+       value: 86.973
+     - type: mrr_at_7
+       value: 87.13300000000001
+     - type: mrr_at_10
+       value: 87.208
+     - type: mrr_at_20
+       value: 87.303
+     - type: mrr_at_30
+       value: 87.32799999999999
+     - type: mrr_at_50
+       value: 87.347
+     - type: mrr_at_70
+       value: 87.35199999999999
+     - type: mrr_at_100
+       value: 87.355
+     - type: mrr_at_200
+       value: 87.357
+     - type: mrr_at_300
+       value: 87.357
+     - type: mrr_at_500
+       value: 87.358
+     - type: mrr_at_700
+       value: 87.358
+     - type: mrr_at_1000
+       value: 87.358
+   - task:
+       type: Classification
+     dataset:
+       type: mteb/imdb
+       name: MTEB ImdbClassification
+       config: default
+       split: test
+       revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7
+     metrics:
+     - type: accuracy
+       value: 94.79200000000002
+     - type: ap
+       value: 92.54484356773553
+     - type: f1
+       value: 94.78965313682525
+   - task:
+       type: Retrieval
+     dataset:
+       type: msmarco
+       name: MTEB MSMARCO
+       config: default
+       split: dev
+       revision: None
+     metrics:
+     - type: ndcg_at_1
+       value: 24.398
+     - type: ndcg_at_2
+       value: 31.336000000000002
+     - type: ndcg_at_3
+       value: 35.266999999999996
+     - type: ndcg_at_5
+       value: 39.356
+     - type: ndcg_at_7
+       value: 41.562
+     - type: ndcg_at_10
+       value: 43.408
+     - type: ndcg_at_20
+       value: 46.107
+     - type: ndcg_at_30
+       value: 47.164
+     - type: ndcg_at_50
+       value: 48.126000000000005
+     - type: ndcg_at_70
+       value: 48.626999999999995
+     - type: ndcg_at_100
+       value: 49.043
+     - type: ndcg_at_200
+       value: 49.575
+     - type: ndcg_at_300
+       value: 49.794
+     - type: ndcg_at_500
+       value: 49.942
+     - type: ndcg_at_700
+       value: 50.014
+     - type: ndcg_at_1000
+       value: 50.077000000000005
+     - type: map_at_1
+       value: 23.723
+     - type: map_at_2
+       value: 29.593000000000004
+     - type: map_at_3
+       value: 32.273
+     - type: map_at_5
+       value: 34.587
+     - type: map_at_7
+       value: 35.589999999999996
+     - type: map_at_10
+       value: 36.296
+     - type: map_at_20
+       value: 37.059999999999995
+     - type: map_at_30
+       value: 37.265
+     - type: map_at_50
+       value: 37.402
+     - type: map_at_70
+       value: 37.454
+     - type: map_at_100
+       value: 37.486999999999995
+     - type: map_at_200
+       value: 37.516
+     - type: map_at_300
+       value: 37.524
+     - type: map_at_500
+       value: 37.528
+     - type: map_at_700
+       value: 37.529
+     - type: map_at_1000
+       value: 37.53
+     - type: recall_at_1
+       value: 23.723
+     - type: recall_at_2
+       value: 35.355
+     - type: recall_at_3
+       value: 43.22
+     - type: recall_at_5
+       value: 53.025
+     - type: recall_at_7
+       value: 59.327
+     - type: recall_at_10
+       value: 65.302
+     - type: recall_at_20
+       value: 75.765
+     - type: recall_at_30
+       value: 80.632
+     - type: recall_at_50
+       value: 85.63499999999999
+     - type: recall_at_70
+       value: 88.554
+     - type: recall_at_100
+       value: 91.16300000000001
+     - type: recall_at_200
+       value: 94.85
+     - type: recall_at_300
+       value: 96.532
+     - type: recall_at_500
+       value: 97.751
+     - type: recall_at_700
+       value: 98.383
+     - type: recall_at_1000
+       value: 98.97
+     - type: precision_at_1
+       value: 24.398
+     - type: precision_at_2
+       value: 18.274
+     - type: precision_at_3
+       value: 14.951999999999998
+     - type: precision_at_5
+       value: 11.052
+     - type: precision_at_7
+       value: 8.84
+     - type: precision_at_10
+       value: 6.8309999999999995
+     - type: precision_at_20
+       value: 3.978
+     - type: precision_at_30
+       value: 2.827
+     - type: precision_at_50
+       value: 1.807
+     - type: precision_at_70
+       value: 1.336
+     - type: precision_at_100
+       value: 0.964
+     - type: precision_at_200
+       value: 0.502
+     - type: precision_at_300
+       value: 0.34099999999999997
+     - type: precision_at_500
+       value: 0.208
+     - type: precision_at_700
+       value: 0.15
+     - type: precision_at_1000
+       value: 0.105
+     - type: mrr_at_1
+       value: 24.398
+     - type: mrr_at_2
+       value: 30.351
+     - type: mrr_at_3
+       value: 33.001000000000005
+     - type: mrr_at_5
+       value: 35.228
+     - type: mrr_at_7
+       value: 36.223
+     - type: mrr_at_10
+       value: 36.903999999999996
+     - type: mrr_at_20
+       value: 37.631
+     - type: mrr_at_30
+       value: 37.830000000000005
+     - type: mrr_at_50
+       value: 37.955
+     - type: mrr_at_70
+       value: 38.003
+     - type: mrr_at_100
+       value: 38.033
+     - type: mrr_at_200
+       value: 38.059
+     - type: mrr_at_300
+       value: 38.066
+     - type: mrr_at_500
+       value: 38.068999999999996
+     - type: mrr_at_700
+       value: 38.07
+     - type: mrr_at_1000
+       value: 38.07
+   - task:
+       type: Classification
+     dataset:
+       type: mteb/mtop_domain
+       name: MTEB MTOPDomainClassification (en)
+       config: en
+       split: test
+       revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf
+     metrics:
+     - type: accuracy
+       value: 96.35658914728683
+     - type: f1
+       value: 96.15039630903114
+   - task:
+       type: Classification
+     dataset:
+       type: mteb/mtop_intent
+       name: MTEB MTOPIntentClassification (en)
+       config: en
+       split: test
+       revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba
+     metrics:
+     - type: accuracy
+       value: 86.29730962152303
+     - type: f1
+       value: 71.12166316567485
+   - task:
+       type: Classification
+     dataset:
+       type: mteb/amazon_massive_intent
+       name: MTEB MassiveIntentClassification (en)
+       config: en
+       split: test
+       revision: 31efe3c427b0bae9c22cbb560b8f15491cc6bed7
+     metrics:
+     - type: accuracy
+       value: 79.98991257565568
+     - type: f1
+       value: 77.41680115095276
+   - task:
+       type: Classification
+     dataset:
+       type: mteb/amazon_massive_scenario
+       name: MTEB MassiveScenarioClassification (en)
+       config: en
+       split: test
+       revision: 7d571f92784cd94a019292a1f45445077d0ef634
+     metrics:
+     - type: accuracy
+       value: 82.1990585070612
+     - type: f1
+       value: 82.23719179179362
+   - task:
+       type: Clustering
+     dataset:
+       type: mteb/medrxiv-clustering-p2p
+       name: MTEB MedrxivClusteringP2P
+       config: default
+       split: test
+       revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73
+     metrics:
+     - type: v_measure
+       value: 40.03019554933584
+   - task:
+       type: Clustering
+     dataset:
+       type: mteb/medrxiv-clustering-s2s
+       name: MTEB MedrxivClusteringS2S
+       config: default
+       split: test
+       revision: 35191c8c0dca72d8ff3efcd72aa802307d469663
+     metrics:
+     - type: v_measure
+       value: 38.999760551497815
+   - task:
+       type: Reranking
+     dataset:
+       type: mteb/mind_small
+       name: MTEB MindSmallReranking
+       config: default
+       split: test
+       revision: 3bdac13927fdc888b903db93b2ffdbd90b295a69
+     metrics:
+     - type: map
+       value: 32.72383151953079
+     - type: mrr
+       value: 33.93989699030721
+   - task:
+       type: Retrieval
+     dataset:
+       type: nfcorpus
+       name: MTEB NFCorpus
+       config: default
+       split: test
+       revision: None
+     metrics:
+     - type: ndcg_at_1
+       value: 51.858000000000004
+     - type: ndcg_at_2
+       value: 49.675999999999995
+     - type: ndcg_at_3
+       value: 47.519
+     - type: ndcg_at_5
+       value: 45.198
+     - type: ndcg_at_7
+       value: 43.504
+     - type: ndcg_at_10
+       value: 41.88
+     - type: ndcg_at_20
+       value: 39.122
+     - type: ndcg_at_30
+       value: 37.95
+     - type: ndcg_at_50
+       value: 37.602999999999994
+     - type: ndcg_at_70
+       value: 37.836
+     - type: ndcg_at_100
+       value: 38.493
+     - type: ndcg_at_200
+       value: 40.187
+     - type: ndcg_at_300
+       value: 41.524
+     - type: ndcg_at_500
+       value: 43.657000000000004
+     - type: ndcg_at_700
+       value: 45.234
+     - type: ndcg_at_1000
+       value: 47.047
+     - type: map_at_1
+       value: 6.392
+     - type: map_at_2
+       value: 10.113
+     - type: map_at_3
+       value: 11.543000000000001
+     - type: map_at_5
+       value: 13.729
+     - type: map_at_7
+       value: 14.985000000000001
+     - type: map_at_10
+       value: 16.217000000000002
+     - type: map_at_20
+       value: 18.106
+     - type: map_at_30
+       value: 18.878
+     - type: map_at_50
+       value: 19.822
+     - type: map_at_70
+       value: 20.352999999999998
+     - type: map_at_100
+       value: 20.827
+     - type: map_at_200
+       value: 21.512
+     - type: map_at_300
+       value: 21.826
+     - type: map_at_500
+       value: 22.155
+     - type: map_at_700
+       value: 22.349
+     - type: map_at_1000
+       value: 22.531000000000002
+     - type: recall_at_1
+       value: 6.392
+     - type: recall_at_2
+       value: 11.215
+     - type: recall_at_3
+       value: 13.231000000000002
+     - type: recall_at_5
+       value: 16.66
+     - type: recall_at_7
+       value: 18.802
+     - type: recall_at_10
+       value: 21.185000000000002
+     - type: recall_at_20
+       value: 25.35
+     - type: recall_at_30
+       value: 27.91
+     - type: recall_at_50
+       value: 32.845
+     - type: recall_at_70
+       value: 35.789
+     - type: recall_at_100
+       value: 39.247
+     - type: recall_at_200
+       value: 46.655
+     - type: recall_at_300
+       value: 51.43299999999999
+     - type: recall_at_500
+       value: 59.472
+     - type: recall_at_700
+       value: 64.742
+     - type: recall_at_1000
+       value: 70.97099999999999
+     - type: precision_at_1
+       value: 53.559999999999995
+     - type: precision_at_2
+       value: 48.762
+     - type: precision_at_3
+       value: 44.169000000000004
+     - type: precision_at_5
+       value: 39.071
+     - type: precision_at_7
+       value: 35.161
+     - type: precision_at_10
+       value: 31.238
+     - type: precision_at_20
+       value: 23.064999999999998
+     - type: precision_at_30
+       value: 18.844
+     - type: precision_at_50
+       value: 14.601
+     - type: precision_at_70
+       value: 12.088000000000001
+     - type: precision_at_100
+       value: 9.844999999999999
+     - type: precision_at_200
+       value: 6.358
+     - type: precision_at_300
+       value: 4.915
+     - type: precision_at_500
+       value: 3.531
+     - type: precision_at_700
+       value: 2.8649999999999998
+     - type: precision_at_1000
+       value: 2.289
+     - type: mrr_at_1
+       value: 54.17999999999999
+     - type: mrr_at_2
+       value: 59.288
+     - type: mrr_at_3
+       value: 60.836
+     - type: mrr_at_5
+       value: 62.275999999999996
+     - type: mrr_at_7
+       value: 62.688
+     - type: mrr_at_10
+       value: 62.865
+     - type: mrr_at_20
+       value: 63.11
+     - type: mrr_at_30
+       value: 63.193999999999996
+     - type: mrr_at_50
+       value: 63.258
+     - type: mrr_at_70
+       value: 63.278
+     - type: mrr_at_100
+       value: 63.297000000000004
+     - type: mrr_at_200
+       value: 63.315999999999995
+     - type: mrr_at_300
+       value: 63.318
+     - type: mrr_at_500
+       value: 63.32299999999999
+     - type: mrr_at_700
+       value: 63.324000000000005
+     - type: mrr_at_1000
+       value: 63.324999999999996
+   - task:
+       type: Retrieval
+     dataset:
+       type: nq
+       name: MTEB NQ
+       config: default
+       split: test
+       revision: None
+     metrics:
+     - type: ndcg_at_1
+       value: 50.897999999999996
+     - type: ndcg_at_2
+       value: 59.126
+     - type: ndcg_at_3
+       value: 63.093999999999994
+     - type: ndcg_at_5
+       value: 67.197
+     - type: ndcg_at_7
+       value: 68.719
+     - type: ndcg_at_10
+       value: 69.915
+     - type: ndcg_at_20
+       value: 71.229
+     - type: ndcg_at_30
+       value: 71.667
+     - type: ndcg_at_50
+       value: 71.98
+     - type: ndcg_at_70
+       value: 72.127
+     - type: ndcg_at_100
+       value: 72.217
+     - type: ndcg_at_200
+       value: 72.319
+     - type: ndcg_at_300
+       value: 72.347
+     - type: ndcg_at_500
+       value: 72.37
+     - type: ndcg_at_700
+       value: 72.379
+     - type: ndcg_at_1000
+       value: 72.381
+     - type: map_at_1
+       value: 45.297
+     - type: map_at_2
+       value: 55.596000000000004
+     - type: map_at_3
+       value: 58.724
+     - type: map_at_5
+       value: 61.387
+     - type: map_at_7
+       value: 62.173
+     - type: map_at_10
+       value: 62.69
+     - type: map_at_20
+       value: 63.125
+     - type: map_at_30
+       value: 63.223
+     - type: map_at_50
+       value: 63.27700000000001
+     - type: map_at_70
+       value: 63.295
+     - type: map_at_100
+       value: 63.303
+     - type: map_at_200
+       value: 63.31
+     - type: map_at_300
+       value: 63.31099999999999
+     - type: map_at_500
+       value: 63.312000000000005
+     - type: map_at_700
+       value: 63.312000000000005
+     - type: map_at_1000
+       value: 63.312000000000005
+     - type: recall_at_1
+       value: 45.297
+     - type: recall_at_2
+       value: 63.866
+     - type: recall_at_3
+       value: 71.898
+     - type: recall_at_5
+       value: 81.16600000000001
+     - type: recall_at_7
+       value: 85.301
+     - type: recall_at_10
+       value: 88.94800000000001
+     - type: recall_at_20
+       value: 93.719
+     - type: recall_at_30
+       value: 95.628
+     - type: recall_at_50
+       value: 97.14699999999999
+     - type: recall_at_70
+       value: 97.955
+     - type: recall_at_100
+       value: 98.48599999999999
+     - type: recall_at_200
+       value: 99.157
+     - type: recall_at_300
+       value: 99.355
+     - type: recall_at_500
+       value: 99.53699999999999
+     - type: recall_at_700
+       value: 99.62299999999999
+     - type: recall_at_1000
+       value: 99.638
+     - type: precision_at_1
+       value: 50.897999999999996
+     - type: precision_at_2
+       value: 36.703
+     - type: precision_at_3
+       value: 27.926000000000002
+     - type: precision_at_5
+       value: 19.276
+     - type: precision_at_7
+       value: 14.533999999999999
+     - type: precision_at_10
+       value: 10.678
+     - type: precision_at_20
+       value: 5.663
+     - type: precision_at_30
+       value: 3.8600000000000003
+     - type: precision_at_50
+       value: 2.358
+     - type: precision_at_70
+       value: 1.7000000000000002
+     - type: precision_at_100
+       value: 1.198
+     - type: precision_at_200
+       value: 0.603
+     - type: precision_at_300
+       value: 0.40299999999999997
+     - type: precision_at_500
+       value: 0.242
+     - type: precision_at_700
+       value: 0.173
+     - type: precision_at_1000
+       value: 0.121
+     - type: mrr_at_1
+       value: 50.897999999999996
+     - type: mrr_at_2
+       value: 59.994
+     - type: mrr_at_3
+       value: 62.553000000000004
+     - type: mrr_at_5
+       value: 64.307
+     - type: mrr_at_7
+       value: 64.864
+     - type: mrr_at_10
+       value: 65.22200000000001
+     - type: mrr_at_20
+       value: 65.499
+     - type: mrr_at_30
+       value: 65.561
+     - type: mrr_at_50
+       value: 65.592
+     - type: mrr_at_70
+       value: 65.602
+     - type: mrr_at_100
+       value: 65.607
+     - type: mrr_at_200
+       value: 65.61099999999999
+     - type: mrr_at_300
+       value: 65.61200000000001
+     - type: mrr_at_500
+       value: 65.61200000000001
+     - type: mrr_at_700
+       value: 65.61200000000001
+     - type: mrr_at_1000
+       value: 65.61200000000001
+   - task:
+       type: Retrieval
+     dataset:
+       type: quora
+       name: MTEB QuoraRetrieval
+       config: default
+       split: test
+       revision: None
+     metrics:
+     - type: ndcg_at_1
+       value: 82.96
+     - type: ndcg_at_2
+       value: 85.614
+     - type: ndcg_at_3
+       value: 87.19
+     - type: ndcg_at_5
+       value: 88.654
+     - type: ndcg_at_7
+       value: 89.287
+     - type: ndcg_at_10
+       value: 89.785
+     - type: ndcg_at_20
+       value: 90.384
+     - type: ndcg_at_30
+       value: 90.589
+     - type: ndcg_at_50
+       value: 90.738
+     - type: ndcg_at_70
+       value: 90.789
+     - type: ndcg_at_100
+       value: 90.824
+     - type: ndcg_at_200
+       value: 90.869
+     - type: ndcg_at_300
+       value: 90.881
+     - type: ndcg_at_500
+       value: 90.886
+     - type: ndcg_at_700
+       value: 90.889
+     - type: ndcg_at_1000
+       value: 90.889
+     - type: map_at_1
+       value: 72.152
+     - type: map_at_2
+       value: 80.818
+     - type: map_at_3
+       value: 83.462
+     - type: map_at_5
+       value: 85.286
+     - type: map_at_7
+       value: 85.921
+     - type: map_at_10
+       value: 86.334
+     - type: map_at_20
+       value: 86.737
+     - type: map_at_30
+       value: 86.847
+     - type: map_at_50
+       value: 86.911
+     - type: map_at_70
+       value: 86.932
+     - type: map_at_100
+       value: 86.943
+     - type: map_at_200
+       value: 86.953
+     - type: map_at_300
+       value: 86.955
+     - type: map_at_500
+       value: 86.956
+     - type: map_at_700
+       value: 86.956
+     - type: map_at_1000
+       value: 86.956
+     - type: recall_at_1
+       value: 72.152
+     - type: recall_at_2
+       value: 84.129
+     - type: recall_at_3
+       value: 88.87
+     - type: recall_at_5
+       value: 93.067
+     - type: recall_at_7
+       value: 94.882
+     - type: recall_at_10
+       value: 96.353
+     - type: recall_at_20
+       value: 98.26700000000001
+     - type: recall_at_30
+       value: 98.92999999999999
+     - type: recall_at_50
+       value: 99.441
+     - type: recall_at_70
+       value: 99.619
+     - type: recall_at_100
+       value: 99.748
+     - type: recall_at_200
+       value: 99.911
+     - type: recall_at_300
+       value: 99.956
+     - type: recall_at_500
+       value: 99.98
+     - type: recall_at_700
+       value: 99.991
+     - type: recall_at_1000
+       value: 99.996
+     - type: precision_at_1
+       value: 82.96
+     - type: precision_at_2
+       value: 52.175000000000004
+     - type: precision_at_3
+       value: 38.223
+     - type: precision_at_5
+       value: 25.056
+     - type: precision_at_7
+       value: 18.717
+     - type: precision_at_10
+       value: 13.614999999999998
+     - type: precision_at_20
+       value: 7.208
+     - type: precision_at_30
+       value: 4.928
+     - type: precision_at_50
+       value: 3.024
+     - type: precision_at_70
+       value: 2.183
+     - type: precision_at_100
+       value: 1.54
+     - type: precision_at_200
+       value: 0.779
+     - type: precision_at_300
+       value: 0.521
+     - type: precision_at_500
+       value: 0.313
+     - type: precision_at_700
+       value: 0.22399999999999998
+     - type: precision_at_1000
+       value: 0.157
+     - type: mrr_at_1
+       value: 82.96
+     - type: mrr_at_2
+       value: 87.005
+     - type: mrr_at_3
+       value: 88.07199999999999
+     - type: mrr_at_5
+       value: 88.634
+     - type: mrr_at_7
+       value: 88.793
+     - type: mrr_at_10
+       value: 88.87899999999999
+     - type: mrr_at_20
+       value: 88.94999999999999
+     - type: mrr_at_30
+       value: 88.96
+     - type: mrr_at_50
+       value: 88.965
+     - type: mrr_at_70
+       value: 88.966
+     - type: mrr_at_100
+       value: 88.967
+     - type: mrr_at_200
+       value: 88.967
+     - type: mrr_at_300
+       value: 88.967
+     - type: mrr_at_500
+       value: 88.967
+     - type: mrr_at_700
+       value: 88.967
+     - type: mrr_at_1000
+       value: 88.967
+   - task:
+       type: Clustering
+     dataset:
+       type: mteb/reddit-clustering
+       name: MTEB RedditClustering
+       config: default
+       split: test
+       revision: 24640382cdbf8abc73003fb0fa6d111a705499eb
+     metrics:
+     - type: v_measure
+       value: 59.90388554491155
+   - task:
+       type: Clustering
+     dataset:
+       type: mteb/reddit-clustering-p2p
+       name: MTEB RedditClusteringP2P
+       config: default
+       split: test
+       revision: 282350215ef01743dc01b456c7f5241fa8937f16
+     metrics:
+     - type: v_measure
+       value: 67.64232539036783
+   - task:
+       type: Retrieval
+     dataset:
+       type: scidocs
+       name: MTEB SCIDOCS
+       config: default
2145
+ split: test
2146
+ revision: None
2147
+ metrics:
2148
+ - type: ndcg_at_1
2149
+ value: 22.6
2150
+ - type: ndcg_at_2
2151
+ value: 20.355999999999998
2152
+ - type: ndcg_at_3
2153
+ value: 18.536
2154
+ - type: ndcg_at_5
2155
+ value: 16.523
2156
+ - type: ndcg_at_7
2157
+ value: 17.979
2158
+ - type: ndcg_at_10
2159
+ value: 19.908
2160
+ - type: ndcg_at_20
2161
+ value: 22.887
2162
+ - type: ndcg_at_30
2163
+ value: 24.43
2164
+ - type: ndcg_at_50
2165
+ value: 25.959
2166
+ - type: ndcg_at_70
2167
+ value: 26.989
2168
+ - type: ndcg_at_100
2169
+ value: 27.977
2170
+ - type: ndcg_at_200
2171
+ value: 29.831000000000003
2172
+ - type: ndcg_at_300
2173
+ value: 30.787
2174
+ - type: ndcg_at_500
2175
+ value: 31.974999999999998
2176
+ - type: ndcg_at_700
2177
+ value: 32.554
2178
+ - type: ndcg_at_1000
2179
+ value: 33.277
2180
+ - type: map_at_1
2181
+ value: 4.593
2182
+ - type: map_at_2
2183
+ value: 6.923
2184
+ - type: map_at_3
2185
+ value: 8.3
2186
+ - type: map_at_5
2187
+ value: 10.072000000000001
2188
+ - type: map_at_7
2189
+ value: 10.782
2190
+ - type: map_at_10
2191
+ value: 11.72
2192
+ - type: map_at_20
2193
+ value: 12.838
2194
+ - type: map_at_30
2195
+ value: 13.257
2196
+ - type: map_at_50
2197
+ value: 13.569
2198
+ - type: map_at_70
2199
+ value: 13.733
2200
+ - type: map_at_100
2201
+ value: 13.858999999999998
2202
+ - type: map_at_200
2203
+ value: 14.018
2204
+ - type: map_at_300
2205
+ value: 14.072999999999999
2206
+ - type: map_at_500
2207
+ value: 14.126
2208
+ - type: map_at_700
2209
+ value: 14.145
2210
+ - type: map_at_1000
2211
+ value: 14.161999999999999
2212
+ - type: recall_at_1
2213
+ value: 4.593
2214
+ - type: recall_at_2
2215
+ value: 7.997999999999999
2216
+ - type: recall_at_3
2217
+ value: 10.563
2218
+ - type: recall_at_5
2219
+ value: 14.907
2220
+ - type: recall_at_7
2221
+ value: 17.4
2222
+ - type: recall_at_10
2223
+ value: 21.18
2224
+ - type: recall_at_20
2225
+ value: 28.144999999999996
2226
+ - type: recall_at_30
2227
+ value: 32.462
2228
+ - type: recall_at_50
2229
+ value: 37.267
2230
+ - type: recall_at_70
2231
+ value: 40.875
2232
+ - type: recall_at_100
2233
+ value: 44.641999999999996
2234
+ - type: recall_at_200
2235
+ value: 52.573
2236
+ - type: recall_at_300
2237
+ value: 57.089999999999996
2238
+ - type: recall_at_500
2239
+ value: 63.14300000000001
2240
+ - type: recall_at_700
2241
+ value: 66.313
2242
+ - type: recall_at_1000
2243
+ value: 70.458
2244
+ - type: precision_at_1
2245
+ value: 22.6
2246
+ - type: precision_at_2
2247
+ value: 19.7
2248
+ - type: precision_at_3
2249
+ value: 17.333000000000002
2250
+ - type: precision_at_5
2251
+ value: 14.680000000000001
2252
+ - type: precision_at_7
2253
+ value: 12.243
2254
+ - type: precision_at_10
2255
+ value: 10.440000000000001
2256
+ - type: precision_at_20
2257
+ value: 6.944999999999999
2258
+ - type: precision_at_30
2259
+ value: 5.333
2260
+ - type: precision_at_50
2261
+ value: 3.678
2262
+ - type: precision_at_70
2263
+ value: 2.881
2264
+ - type: precision_at_100
2265
+ value: 2.2030000000000003
2266
+ - type: precision_at_200
2267
+ value: 1.295
2268
+ - type: precision_at_300
2269
+ value: 0.9369999999999999
2270
+ - type: precision_at_500
2271
+ value: 0.622
2272
+ - type: precision_at_700
2273
+ value: 0.466
2274
+ - type: precision_at_1000
2275
+ value: 0.347
2276
+ - type: mrr_at_1
2277
+ value: 22.6
2278
+ - type: mrr_at_2
2279
+ value: 27.900000000000002
2280
+ - type: mrr_at_3
2281
+ value: 30.067
2282
+ - type: mrr_at_5
2283
+ value: 32.207
2284
+ - type: mrr_at_7
2285
+ value: 33.004
2286
+ - type: mrr_at_10
2287
+ value: 33.596
2288
+ - type: mrr_at_20
2289
+ value: 34.268
2290
+ - type: mrr_at_30
2291
+ value: 34.492
2292
+ - type: mrr_at_50
2293
+ value: 34.628
2294
+ - type: mrr_at_70
2295
+ value: 34.681
2296
+ - type: mrr_at_100
2297
+ value: 34.717
2298
+ - type: mrr_at_200
2299
+ value: 34.757
2300
+ - type: mrr_at_300
2301
+ value: 34.768
2302
+ - type: mrr_at_500
2303
+ value: 34.772
2304
+ - type: mrr_at_700
2305
+ value: 34.774
2306
+ - type: mrr_at_1000
2307
+ value: 34.775
2308
+ - task:
2309
+ type: STS
2310
+ dataset:
2311
+ type: mteb/sickr-sts
2312
+ name: MTEB SICK-R
2313
+ config: default
2314
+ split: test
2315
+ revision: a6ea5a8cab320b040a23452cc28066d9beae2cee
2316
+ metrics:
2317
+ - type: cos_sim_pearson
2318
+ value: 86.90122745229677
2319
+ - type: cos_sim_spearman
2320
+ value: 82.92294737327579
2321
+ - type: euclidean_pearson
2322
+ value: 84.08979655773187
2323
+ - type: euclidean_spearman
2324
+ value: 82.92294657285412
2325
+ - type: manhattan_pearson
2326
+ value: 84.09347480531832
2327
+ - type: manhattan_spearman
2328
+ value: 82.91564613948087
2329
+ - task:
2330
+ type: STS
2331
+ dataset:
2332
+ type: mteb/sts12-sts
2333
+ name: MTEB STS12
2334
+ config: default
2335
+ split: test
2336
+ revision: a0d554a64d88156834ff5ae9920b964011b16384
2337
+ metrics:
2338
+ - type: cos_sim_pearson
2339
+ value: 87.01218713698583
2340
+ - type: cos_sim_spearman
2341
+ value: 79.46865215168464
2342
+ - type: euclidean_pearson
2343
+ value: 83.22621889891909
2344
+ - type: euclidean_spearman
2345
+ value: 79.46853821709514
2346
+ - type: manhattan_pearson
2347
+ value: 83.69962580788805
2348
+ - type: manhattan_spearman
2349
+ value: 79.9561593356932
2350
+ - task:
2351
+ type: STS
2352
+ dataset:
2353
+ type: mteb/sts13-sts
2354
+ name: MTEB STS13
2355
+ config: default
2356
+ split: test
2357
+ revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca
2358
+ metrics:
2359
+ - type: cos_sim_pearson
2360
+ value: 88.98438696342964
2361
+ - type: cos_sim_spearman
2362
+ value: 89.15419511870839
2363
+ - type: euclidean_pearson
2364
+ value: 88.49646141802894
2365
+ - type: euclidean_spearman
2366
+ value: 89.15419503946019
2367
+ - type: manhattan_pearson
2368
+ value: 88.6420585616327
2369
+ - type: manhattan_spearman
2370
+ value: 89.42648950757743
2371
+ - task:
2372
+ type: STS
2373
+ dataset:
2374
+ type: mteb/sts14-sts
2375
+ name: MTEB STS14
2376
+ config: default
2377
+ split: test
2378
+ revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375
2379
+ metrics:
2380
+ - type: cos_sim_pearson
2381
+ value: 87.30772547759544
2382
+ - type: cos_sim_spearman
2383
+ value: 84.93199878424691
2384
+ - type: euclidean_pearson
2385
+ value: 86.16266630395455
2386
+ - type: euclidean_spearman
2387
+ value: 84.93198798543634
2388
+ - type: manhattan_pearson
2389
+ value: 86.14285723189803
2390
+ - type: manhattan_spearman
2391
+ value: 85.0361672522687
2392
+ - task:
2393
+ type: STS
2394
+ dataset:
2395
+ type: mteb/sts15-sts
2396
+ name: MTEB STS15
2397
+ config: default
2398
+ split: test
2399
+ revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3
2400
+ metrics:
2401
+ - type: cos_sim_pearson
2402
+ value: 90.21342071197127
2403
+ - type: cos_sim_spearman
2404
+ value: 90.7407512744838
2405
+ - type: euclidean_pearson
2406
+ value: 90.1517933113061
2407
+ - type: euclidean_spearman
2408
+ value: 90.74075125431919
2409
+ - type: manhattan_pearson
2410
+ value: 90.17963034676193
2411
+ - type: manhattan_spearman
2412
+ value: 90.88999275865135
2413
+ - task:
2414
+ type: STS
2415
+ dataset:
2416
+ type: mteb/sts16-sts
2417
+ name: MTEB STS16
2418
+ config: default
2419
+ split: test
2420
+ revision: 4d8694f8f0e0100860b497b999b3dbed754a0513
2421
+ metrics:
2422
+ - type: cos_sim_pearson
2423
+ value: 86.82518054100498
2424
+ - type: cos_sim_spearman
2425
+ value: 87.81570533154735
2426
+ - type: euclidean_pearson
2427
+ value: 86.91684561573618
2428
+ - type: euclidean_spearman
2429
+ value: 87.81570533154735
2430
+ - type: manhattan_pearson
2431
+ value: 86.98311935744032
2432
+ - type: manhattan_spearman
2433
+ value: 87.9594667151966
2434
+ - task:
2435
+ type: STS
2436
+ dataset:
2437
+ type: mteb/sts17-crosslingual-sts
2438
+ name: MTEB STS17 (en-en)
2439
+ config: en-en
2440
+ split: test
2441
+ revision: af5e6fb845001ecf41f4c1e033ce921939a2a68d
2442
+ metrics:
2443
+ - type: cos_sim_pearson
2444
+ value: 92.09578436612053
2445
+ - type: cos_sim_spearman
2446
+ value: 92.01519349090438
2447
+ - type: euclidean_pearson
2448
+ value: 92.07113635890894
2449
+ - type: euclidean_spearman
2450
+ value: 92.01519349090438
2451
+ - type: manhattan_pearson
2452
+ value: 91.89343820765625
2453
+ - type: manhattan_spearman
2454
+ value: 91.7443476810177
2455
+ - task:
2456
+ type: STS
2457
+ dataset:
2458
+ type: mteb/sts22-crosslingual-sts
2459
+ name: MTEB STS22 (en)
2460
+ config: en
2461
+ split: test
2462
+ revision: 6d1ba47164174a496b7fa5d3569dae26a6813b80
2463
+ metrics:
2464
+ - type: cos_sim_pearson
2465
+ value: 69.29997751464549
2466
+ - type: cos_sim_spearman
2467
+ value: 68.36425436812782
2468
+ - type: euclidean_pearson
2469
+ value: 69.81381677661783
2470
+ - type: euclidean_spearman
2471
+ value: 68.36425436812782
2472
+ - type: manhattan_pearson
2473
+ value: 69.92823397008026
2474
+ - type: manhattan_spearman
2475
+ value: 68.35770640039254
2476
+ - task:
2477
+ type: STS
2478
+ dataset:
2479
+ type: mteb/stsbenchmark-sts
2480
+ name: MTEB STSBenchmark
2481
+ config: default
2482
+ split: test
2483
+ revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831
2484
+ metrics:
2485
+ - type: cos_sim_pearson
2486
+ value: 88.39126315452359
2487
+ - type: cos_sim_spearman
2488
+ value: 88.99708463265337
2489
+ - type: euclidean_pearson
2490
+ value: 88.60793820038607
2491
+ - type: euclidean_spearman
2492
+ value: 88.99708463265337
2493
+ - type: manhattan_pearson
2494
+ value: 88.69860633571047
2495
+ - type: manhattan_spearman
2496
+ value: 89.20094593888012
2497
+ - task:
2498
+ type: Reranking
2499
+ dataset:
2500
+ type: mteb/scidocs-reranking
2501
+ name: MTEB SciDocsRR
2502
+ config: default
2503
+ split: test
2504
+ revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab
2505
+ metrics:
2506
+ - type: map
2507
+ value: 86.58028062818582
2508
+ - type: mrr
2509
+ value: 96.53586790841693
2510
+ - task:
2511
+ type: Retrieval
2512
+ dataset:
2513
+ type: scifact
2514
+ name: MTEB SciFact
2515
+ config: default
2516
+ split: test
2517
+ revision: None
2518
+ metrics:
2519
+ - type: ndcg_at_1
2520
+ value: 66.333
2521
+ - type: ndcg_at_2
2522
+ value: 70.655
2523
+ - type: ndcg_at_3
2524
+ value: 72.801
2525
+ - type: ndcg_at_5
2526
+ value: 75.793
2527
+ - type: ndcg_at_7
2528
+ value: 76.946
2529
+ - type: ndcg_at_10
2530
+ value: 77.66199999999999
2531
+ - type: ndcg_at_20
2532
+ value: 78.786
2533
+ - type: ndcg_at_30
2534
+ value: 79.066
2535
+ - type: ndcg_at_50
2536
+ value: 79.255
2537
+ - type: ndcg_at_70
2538
+ value: 79.423
2539
+ - type: ndcg_at_100
2540
+ value: 79.476
2541
+ - type: ndcg_at_200
2542
+ value: 79.65299999999999
2543
+ - type: ndcg_at_300
2544
+ value: 79.696
2545
+ - type: ndcg_at_500
2546
+ value: 79.73599999999999
2547
+ - type: ndcg_at_700
2548
+ value: 79.77199999999999
2549
+ - type: ndcg_at_1000
2550
+ value: 79.77199999999999
2551
+ - type: map_at_1
2552
+ value: 63.383
2553
+ - type: map_at_2
2554
+ value: 68.144
2555
+ - type: map_at_3
2556
+ value: 70.19800000000001
2557
+ - type: map_at_5
2558
+ value: 72.38
2559
+ - type: map_at_7
2560
+ value: 72.955
2561
+ - type: map_at_10
2562
+ value: 73.312
2563
+ - type: map_at_20
2564
+ value: 73.678
2565
+ - type: map_at_30
2566
+ value: 73.72800000000001
2567
+ - type: map_at_50
2568
+ value: 73.75500000000001
2569
+ - type: map_at_70
2570
+ value: 73.771
2571
+ - type: map_at_100
2572
+ value: 73.776
2573
+ - type: map_at_200
2574
+ value: 73.783
2575
+ - type: map_at_300
2576
+ value: 73.784
2577
+ - type: map_at_500
2578
+ value: 73.785
2579
+ - type: map_at_700
2580
+ value: 73.786
2581
+ - type: map_at_1000
2582
+ value: 73.786
2583
+ - type: recall_at_1
2584
+ value: 63.383
2585
+ - type: recall_at_2
2586
+ value: 72.283
2587
+ - type: recall_at_3
2588
+ value: 77.183
2589
+ - type: recall_at_5
2590
+ value: 84.56099999999999
2591
+ - type: recall_at_7
2592
+ value: 87.67200000000001
2593
+ - type: recall_at_10
2594
+ value: 89.822
2595
+ - type: recall_at_20
2596
+ value: 94
2597
+ - type: recall_at_30
2598
+ value: 95.333
2599
+ - type: recall_at_50
2600
+ value: 96.333
2601
+ - type: recall_at_70
2602
+ value: 97.333
2603
+ - type: recall_at_100
2604
+ value: 97.667
2605
+ - type: recall_at_200
2606
+ value: 99
2607
+ - type: recall_at_300
2608
+ value: 99.333
2609
+ - type: recall_at_500
2610
+ value: 99.667
2611
+ - type: recall_at_700
2612
+ value: 100
2613
+ - type: recall_at_1000
2614
+ value: 100
2615
+ - type: precision_at_1
2616
+ value: 66.333
2617
+ - type: precision_at_2
2618
+ value: 38.667
2619
+ - type: precision_at_3
2620
+ value: 28.111000000000004
2621
+ - type: precision_at_5
2622
+ value: 18.933
2623
+ - type: precision_at_7
2624
+ value: 14.094999999999999
2625
+ - type: precision_at_10
2626
+ value: 10.167
2627
+ - type: precision_at_20
2628
+ value: 5.35
2629
+ - type: precision_at_30
2630
+ value: 3.611
2631
+ - type: precision_at_50
2632
+ value: 2.1870000000000003
2633
+ - type: precision_at_70
2634
+ value: 1.576
2635
+ - type: precision_at_100
2636
+ value: 1.107
2637
+ - type: precision_at_200
2638
+ value: 0.5599999999999999
2639
+ - type: precision_at_300
2640
+ value: 0.374
2641
+ - type: precision_at_500
2642
+ value: 0.22499999999999998
2643
+ - type: precision_at_700
2644
+ value: 0.161
2645
+ - type: precision_at_1000
2646
+ value: 0.11299999999999999
2647
+ - type: mrr_at_1
2648
+ value: 66.333
2649
+ - type: mrr_at_2
2650
+ value: 70.833
2651
+ - type: mrr_at_3
2652
+ value: 72.167
2653
+ - type: mrr_at_5
2654
+ value: 73.6
2655
+ - type: mrr_at_7
2656
+ value: 74.084
2657
+ - type: mrr_at_10
2658
+ value: 74.283
2659
+ - type: mrr_at_20
2660
+ value: 74.54499999999999
2661
+ - type: mrr_at_30
2662
+ value: 74.59599999999999
2663
+ - type: mrr_at_50
2664
+ value: 74.622
2665
+ - type: mrr_at_70
2666
+ value: 74.639
2667
+ - type: mrr_at_100
2668
+ value: 74.643
2669
+ - type: mrr_at_200
2670
+ value: 74.65
2671
+ - type: mrr_at_300
2672
+ value: 74.652
2673
+ - type: mrr_at_500
2674
+ value: 74.653
2675
+ - type: mrr_at_700
2676
+ value: 74.653
2677
+ - type: mrr_at_1000
2678
+ value: 74.653
2679
+ - task:
2680
+ type: PairClassification
2681
+ dataset:
2682
+ type: mteb/sprintduplicatequestions-pairclassification
2683
+ name: MTEB SprintDuplicateQuestions
2684
+ config: default
2685
+ split: test
2686
+ revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46
2687
+ metrics:
2688
+ - type: cos_sim_accuracy
2689
+ value: 99.84554455445544
2690
+ - type: cos_sim_ap
2691
+ value: 96.31178339136798
2692
+ - type: cos_sim_f1
2693
+ value: 92.1921921921922
2694
+ - type: cos_sim_precision
2695
+ value: 92.28456913827655
2696
+ - type: cos_sim_recall
2697
+ value: 92.10000000000001
2698
+ - type: dot_accuracy
2699
+ value: 99.84554455445544
2700
+ - type: dot_ap
2701
+ value: 96.31178339136797
2702
+ - type: dot_f1
2703
+ value: 92.1921921921922
2704
+ - type: dot_precision
2705
+ value: 92.28456913827655
2706
+ - type: dot_recall
2707
+ value: 92.10000000000001
2708
+ - type: euclidean_accuracy
2709
+ value: 99.84554455445544
2710
+ - type: euclidean_ap
2711
+ value: 96.31178339136798
2712
+ - type: euclidean_f1
2713
+ value: 92.1921921921922
2714
+ - type: euclidean_precision
2715
+ value: 92.28456913827655
2716
+ - type: euclidean_recall
2717
+ value: 92.10000000000001
2718
+ - type: manhattan_accuracy
2719
+ value: 99.84752475247525
2720
+ - type: manhattan_ap
2721
+ value: 96.4591954606088
2722
+ - type: manhattan_f1
2723
+ value: 92.25352112676056
2724
+ - type: manhattan_precision
2725
+ value: 92.81376518218623
2726
+ - type: manhattan_recall
2727
+ value: 91.7
2728
+ - type: max_accuracy
2729
+ value: 99.84752475247525
2730
+ - type: max_ap
2731
+ value: 96.4591954606088
2732
+ - type: max_f1
2733
+ value: 92.25352112676056
2734
+ - task:
2735
+ type: Clustering
2736
+ dataset:
2737
+ type: mteb/stackexchange-clustering
2738
+ name: MTEB StackExchangeClustering
2739
+ config: default
2740
+ split: test
2741
+ revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259
2742
+ metrics:
2743
+ - type: v_measure
2744
+ value: 74.24659759283294
2745
+ - task:
2746
+ type: Clustering
2747
+ dataset:
2748
+ type: mteb/stackexchange-clustering-p2p
2749
+ name: MTEB StackExchangeClusteringP2P
2750
+ config: default
2751
+ split: test
2752
+ revision: 815ca46b2622cec33ccafc3735d572c266efdb44
2753
+ metrics:
2754
+ - type: v_measure
2755
+ value: 46.77690051260451
2756
+ - task:
2757
+ type: Reranking
2758
+ dataset:
2759
+ type: mteb/stackoverflowdupquestions-reranking
2760
+ name: MTEB StackOverflowDupQuestions
2761
+ config: default
2762
+ split: test
2763
+ revision: e185fbe320c72810689fc5848eb6114e1ef5ec69
2764
+ metrics:
2765
+ - type: map
2766
+ value: 55.68436757803185
2767
+ - type: mrr
2768
+ value: 56.82157711569475
2769
+ - task:
2770
+ type: Summarization
2771
+ dataset:
2772
+ type: mteb/summeval
2773
+ name: MTEB SummEval
2774
+ config: default
2775
+ split: test
2776
+ revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c
2777
+ metrics:
2778
+ - type: cos_sim_pearson
2779
+ value: 31.652482405629843
2780
+ - type: cos_sim_spearman
2781
+ value: 31.16341822347735
2782
+ - type: dot_pearson
2783
+ value: 31.652479892699837
2784
+ - type: dot_spearman
2785
+ value: 31.16341822347735
2786
+ - task:
2787
+ type: Retrieval
2788
+ dataset:
2789
+ type: trec-covid
2790
+ name: MTEB TRECCOVID
2791
+ config: default
2792
+ split: test
2793
+ revision: None
2794
+ metrics:
2795
+ - type: ndcg_at_1
2796
+ value: 92
2797
+ - type: ndcg_at_2
2798
+ value: 90.839
2799
+ - type: ndcg_at_3
2800
+ value: 90.642
2801
+ - type: ndcg_at_5
2802
+ value: 90.348
2803
+ - type: ndcg_at_7
2804
+ value: 89.015
2805
+ - type: ndcg_at_10
2806
+ value: 87.599
2807
+ - type: ndcg_at_20
2808
+ value: 84.434
2809
+ - type: ndcg_at_30
2810
+ value: 81.655
2811
+ - type: ndcg_at_50
2812
+ value: 77.278
2813
+ - type: ndcg_at_70
2814
+ value: 73.957
2815
+ - type: ndcg_at_100
2816
+ value: 69.56
2817
+ - type: ndcg_at_200
2818
+ value: 60.724000000000004
2819
+ - type: ndcg_at_300
2820
+ value: 57.245000000000005
2821
+ - type: ndcg_at_500
2822
+ value: 56.316
2823
+ - type: ndcg_at_700
2824
+ value: 58.399
2825
+ - type: ndcg_at_1000
2826
+ value: 62.21600000000001
2827
+ - type: map_at_1
2828
+ value: 0.247
2829
+ - type: map_at_2
2830
+ value: 0.488
2831
+ - type: map_at_3
2832
+ value: 0.7230000000000001
2833
+ - type: map_at_5
2834
+ value: 1.204
2835
+ - type: map_at_7
2836
+ value: 1.6500000000000001
2837
+ - type: map_at_10
2838
+ value: 2.292
2839
+ - type: map_at_20
2840
+ value: 4.274
2841
+ - type: map_at_30
2842
+ value: 6.027
2843
+ - type: map_at_50
2844
+ value: 9.083
2845
+ - type: map_at_70
2846
+ value: 11.751000000000001
2847
+ - type: map_at_100
2848
+ value: 14.912
2849
+ - type: map_at_200
2850
+ value: 22.213
2851
+ - type: map_at_300
2852
+ value: 26.667999999999996
2853
+ - type: map_at_500
2854
+ value: 31.556
2855
+ - type: map_at_700
2856
+ value: 34.221000000000004
2857
+ - type: map_at_1000
2858
+ value: 36.443999999999996
2859
+ - type: recall_at_1
2860
+ value: 0.247
2861
+ - type: recall_at_2
2862
+ value: 0.49899999999999994
2863
+ - type: recall_at_3
2864
+ value: 0.742
2865
+ - type: recall_at_5
2866
+ value: 1.247
2867
+ - type: recall_at_7
2868
+ value: 1.722
2869
+ - type: recall_at_10
2870
+ value: 2.405
2871
+ - type: recall_at_20
2872
+ value: 4.583
2873
+ - type: recall_at_30
2874
+ value: 6.587999999999999
2875
+ - type: recall_at_50
2876
+ value: 10.188
2877
+ - type: recall_at_70
2878
+ value: 13.496
2879
+ - type: recall_at_100
2880
+ value: 17.578
2881
+ - type: recall_at_200
2882
+ value: 28.158
2883
+ - type: recall_at_300
2884
+ value: 35.532000000000004
2885
+ - type: recall_at_500
2886
+ value: 45.31
2887
+ - type: recall_at_700
2888
+ value: 51.822
2889
+ - type: recall_at_1000
2890
+ value: 58.53
2891
+ - type: precision_at_1
2892
+ value: 96
2893
+ - type: precision_at_2
2894
+ value: 96
2895
+ - type: precision_at_3
2896
+ value: 95.333
2897
+ - type: precision_at_5
2898
+ value: 94.8
2899
+ - type: precision_at_7
2900
+ value: 93.429
2901
+ - type: precision_at_10
2902
+ value: 91.4
2903
+ - type: precision_at_20
2904
+ value: 87.7
2905
+ - type: precision_at_30
2906
+ value: 84.867
2907
+ - type: precision_at_50
2908
+ value: 80.24
2909
+ - type: precision_at_70
2910
+ value: 76.371
2911
+ - type: precision_at_100
2912
+ value: 71.08
2913
+ - type: precision_at_200
2914
+ value: 59.4
2915
+ - type: precision_at_300
2916
+ value: 51.459999999999994
2917
+ - type: precision_at_500
2918
+ value: 40.644000000000005
2919
+ - type: precision_at_700
2920
+ value: 33.889
2921
+ - type: precision_at_1000
2922
+ value: 27.250000000000004
2923
+ - type: mrr_at_1
2924
+ value: 96
2925
+ - type: mrr_at_2
2926
+ value: 98
2927
+ - type: mrr_at_3
2928
+ value: 98
2929
+ - type: mrr_at_5
2930
+ value: 98
2931
+ - type: mrr_at_7
2932
+ value: 98
2933
+ - type: mrr_at_10
2934
+ value: 98
2935
+ - type: mrr_at_20
2936
+ value: 98
2937
+ - type: mrr_at_30
2938
+ value: 98
2939
+ - type: mrr_at_50
2940
+ value: 98
2941
+ - type: mrr_at_70
2942
+ value: 98
2943
+ - type: mrr_at_100
2944
+ value: 98
2945
+ - type: mrr_at_200
2946
+ value: 98
2947
+ - type: mrr_at_300
2948
+ value: 98
2949
+ - type: mrr_at_500
2950
+ value: 98
2951
+ - type: mrr_at_700
2952
+ value: 98
2953
+ - type: mrr_at_1000
2954
+ value: 98
2955
+ - task:
2956
+ type: Retrieval
2957
+ dataset:
2958
+ type: webis-touche2020
2959
+ name: MTEB Touche2020
2960
+ config: default
2961
+ split: test
2962
+ revision: None
2963
+ metrics:
2964
+ - type: ndcg_at_1
2965
+ value: 43.878
2966
+ - type: ndcg_at_2
2967
+ value: 37.956
2968
+ - type: ndcg_at_3
2969
+ value: 35.053
2970
+ - type: ndcg_at_5
2971
+ value: 32.59
2972
+ - type: ndcg_at_7
2973
+ value: 30.226
2974
+ - type: ndcg_at_10
2975
+ value: 29.005
2976
+ - type: ndcg_at_20
2977
+ value: 30.11
2978
+ - type: ndcg_at_30
2979
+ value: 32.019999999999996
2980
+ - type: ndcg_at_50
2981
+ value: 34.354
2982
+ - type: ndcg_at_70
2983
+ value: 36.665
2984
+ - type: ndcg_at_100
2985
+ value: 38.888
2986
+ - type: ndcg_at_200
2987
+ value: 43.435
2988
+ - type: ndcg_at_300
2989
+ value: 45.795
2990
+ - type: ndcg_at_500
2991
+ value: 48.699999999999996
2992
+ - type: ndcg_at_700
2993
+ value: 50.242
2994
+ - type: ndcg_at_1000
2995
+ value: 51.529
2996
+ - type: map_at_1
2997
+ value: 3.521
2998
+ - type: map_at_2
2999
+ value: 5.309
3000
+ - type: map_at_3
3001
+ value: 6.576
3002
+ - type: map_at_5
3003
+ value: 8.97
3004
+ - type: map_at_7
3005
+ value: 10.194
3006
+ - type: map_at_10
3007
+ value: 11.949
3008
+ - type: map_at_20
3009
+ value: 14.686
3010
+ - type: map_at_30
3011
+ value: 15.8
3012
+ - type: map_at_50
3013
+ value: 16.59
3014
+ - type: map_at_70
3015
+ value: 17.2
3016
+ - type: map_at_100
3017
+ value: 17.765
3018
+ - type: map_at_200
3019
+ value: 18.636
3020
+ - type: map_at_300
3021
+ value: 18.972
3022
+ - type: map_at_500
3023
+ value: 19.301
3024
+ - type: map_at_700
3025
+ value: 19.445
3026
+ - type: map_at_1000
3027
+ value: 19.546
3028
+ - type: recall_at_1
3029
+ value: 3.521
3030
+ - type: recall_at_2
3031
+ value: 5.848
3032
+ - type: recall_at_3
3033
+ value: 7.657
3034
+ - type: recall_at_5
3035
+ value: 11.368
3036
+ - type: recall_at_7
3037
+ value: 13.748
3038
+ - type: recall_at_10
3039
+ value: 18.061
3040
+ - type: recall_at_20
3041
+ value: 26.844
3042
+ - type: recall_at_30
3043
+ value: 31.186000000000003
3044
+ - type: recall_at_50
3045
+ value: 35.951
3046
+ - type: recall_at_70
3047
+ value: 40.961999999999996
3048
+ - type: recall_at_100
3049
+ value: 46.743
3050
+ - type: recall_at_200
3051
+ value: 58.483
3052
+ - type: recall_at_300
3053
+ value: 65.973
3054
+ - type: recall_at_500
3055
+ value: 75.233
3056
+ - type: recall_at_700
3057
+ value: 80.472
3058
+ - type: recall_at_1000
3059
+ value: 85.02
3060
+ - type: precision_at_1
3061
+ value: 46.939
3062
+ - type: precision_at_2
3063
+ value: 38.775999999999996
3064
+ - type: precision_at_3
3065
+ value: 34.694
3066
+ - type: precision_at_5
3067
+ value: 31.429000000000002
3068
+ - type: precision_at_7
3069
+ value: 27.697
3070
+ - type: precision_at_10
3071
+ value: 24.490000000000002
3072
+ - type: precision_at_20
3073
+ value: 18.776
3074
+ - type: precision_at_30
3075
+ value: 15.034
3076
+ - type: precision_at_50
3077
+ value: 10.857
3078
+ - type: precision_at_70
3079
+ value: 9.096
3080
+ - type: precision_at_100
3081
+ value: 7.51
3082
+ - type: precision_at_200
3083
+ value: 4.929
3084
+ - type: precision_at_300
3085
+ value: 3.7760000000000002
3086
+ - type: precision_at_500
3087
+ value: 2.6780000000000004
3088
+ - type: precision_at_700
3089
+ value: 2.085
3090
+ - type: precision_at_1000
3091
+ value: 1.5709999999999997
3092
+ - type: mrr_at_1
3093
+ value: 46.939
3094
+ - type: mrr_at_2
3095
+ value: 55.102
3096
+ - type: mrr_at_3
3097
+ value: 57.823
3098
+ - type: mrr_at_5
3099
+ value: 60.68
3100
+ - type: mrr_at_7
3101
+ value: 60.972
3102
+ - type: mrr_at_10
3103
+ value: 61.199000000000005
3104
+ - type: mrr_at_20
3105
+ value: 61.831
3106
+ - type: mrr_at_30
3107
+ value: 61.831
3108
+ - type: mrr_at_50
3109
+ value: 61.873
3110
+ - type: mrr_at_70
3111
+ value: 61.873
3112
+ - type: mrr_at_100
3113
+ value: 61.873
3114
+ - type: mrr_at_200
3115
+ value: 61.873
3116
+ - type: mrr_at_300
3117
+ value: 61.873
3118
+ - type: mrr_at_500
3119
+ value: 61.873
3120
+ - type: mrr_at_700
3121
+ value: 61.873
3122
+ - type: mrr_at_1000
3123
+ value: 61.873
3124
+ - task:
3125
+ type: Classification
3126
+ dataset:
3127
+ type: mteb/toxic_conversations_50k
3128
+ name: MTEB ToxicConversationsClassification
3129
+ config: default
3130
+ split: test
3131
+ revision: d7c0de2777da35d6aae2200a62c6e0e5af397c4c
3132
+ metrics:
3133
+ - type: accuracy
3134
+ value: 69.3294
3135
+ - type: ap
3136
+ value: 14.561333393364736
3137
+ - type: f1
3138
+ value: 53.992309820496466
3139
+ - task:
3140
+ type: Classification
3141
+ dataset:
3142
+ type: mteb/tweet_sentiment_extraction
3143
+ name: MTEB TweetSentimentExtractionClassification
3144
+ config: default
3145
+ split: test
3146
+ revision: d604517c81ca91fe16a244d1248fc021f9ecee7a
3147
+ metrics:
3148
+ - type: accuracy
3149
+ value: 63.63893604980192
3150
+ - type: f1
3151
+ value: 63.92959380489434
3152
+ - task:
3153
+ type: Clustering
3154
+ dataset:
3155
+ type: mteb/twentynewsgroups-clustering
3156
+ name: MTEB TwentyNewsgroupsClustering
3157
+ config: default
3158
+ split: test
3159
+ revision: 6125ec4e24fa026cec8a478383ee943acfbd5449
3160
+ metrics:
3161
+ - type: v_measure
3162
+ value: 56.270879258659775
3163
+ - task:
3164
+ type: PairClassification
3165
+ dataset:
3166
+ type: mteb/twittersemeval2015-pairclassification
3167
+ name: MTEB TwitterSemEval2015
3168
+ config: default
3169
+ split: test
3170
+ revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1
3171
+ metrics:
3172
+ - type: cos_sim_accuracy
3173
+ value: 88.71073493473207
3174
+ - type: cos_sim_ap
3175
+ value: 81.52392540284202
3176
+ - type: cos_sim_f1
3177
+ value: 74.71162377994676
3178
+ - type: cos_sim_precision
3179
+ value: 71.89558428885094
3180
+ - type: cos_sim_recall
3181
+ value: 77.75725593667546
3182
+ - type: dot_accuracy
3183
+ value: 88.71073493473207
3184
+ - type: dot_ap
3185
+ value: 81.52394754041109
3186
+ - type: dot_f1
3187
+ value: 74.71162377994676
3188
+ - type: dot_precision
3189
+ value: 71.89558428885094
3190
+ - type: dot_recall
3191
+ value: 77.75725593667546
3192
+ - type: euclidean_accuracy
3193
+ value: 88.71073493473207
3194
+ - type: euclidean_ap
3195
+ value: 81.52392035435321
3196
+ - type: euclidean_f1
3197
+ value: 74.71162377994676
3198
+ - type: euclidean_precision
3199
+ value: 71.89558428885094
3200
+ - type: euclidean_recall
3201
+ value: 77.75725593667546
3202
+ - type: manhattan_accuracy
3203
+ value: 88.47231328604637
3204
+ - type: manhattan_ap
3205
+ value: 81.22907439267321
3206
+ - type: manhattan_f1
3207
+ value: 74.3351571446749
3208
+ - type: manhattan_precision
3209
+ value: 71.78667977390022
3210
+ - type: manhattan_recall
3211
+ value: 77.0712401055409
3212
+ - type: max_accuracy
3213
+ value: 88.71073493473207
3214
+ - type: max_ap
3215
+ value: 81.52394754041109
3216
+ - type: max_f1
3217
+ value: 74.71162377994676
3218
+ - task:
3219
+ type: PairClassification
3220
+ dataset:
3221
+ type: mteb/twitterurlcorpus-pairclassification
3222
+ name: MTEB TwitterURLCorpus
3223
+ config: default
3224
+ split: test
3225
+ revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf
3226
+ metrics:
3227
+ - type: cos_sim_accuracy
3228
+ value: 89.85136026700819
3229
+ - type: cos_sim_ap
3230
+ value: 87.7768002924216
3231
+ - type: cos_sim_f1
3232
+ value: 80.358908624794
3233
+ - type: cos_sim_precision
3234
+ value: 76.62918209122023
3235
+ - type: cos_sim_recall
3236
+ value: 84.47028025870034
3237
+ - type: dot_accuracy
3238
+ value: 89.85136026700819
3239
+ - type: dot_ap
3240
+ value: 87.77680027889778
3241
+ - type: dot_f1
3242
+ value: 80.358908624794
3243
+ - type: dot_precision
3244
+ value: 76.62918209122023
3245
+ - type: dot_recall
3246
+ value: 84.47028025870034
3247
+ - type: euclidean_accuracy
3248
+ value: 89.85136026700819
3249
+ - type: euclidean_ap
3250
+ value: 87.77680174697751
3251
+ - type: euclidean_f1
3252
+ value: 80.358908624794
3253
+ - type: euclidean_precision
3254
+ value: 76.62918209122023
3255
+ - type: euclidean_recall
3256
+ value: 84.47028025870034
3257
+ - type: manhattan_accuracy
3258
+ value: 89.86300306593705
3259
+ - type: manhattan_ap
3260
+ value: 87.78613271895861
3261
+ - type: manhattan_f1
3262
+ value: 80.31831016905645
3263
+ - type: manhattan_precision
3264
+ value: 76.68230516070304
3265
+ - type: manhattan_recall
3266
+ value: 84.3162919618109
3267
+ - type: max_accuracy
3268
+ value: 89.86300306593705
3269
+ - type: max_ap
3270
+ value: 87.78613271895861
3271
+ - type: max_f1
3272
+ value: 80.358908624794
+ language:
+ - en
+ license: cc-by-nc-4.0
+ ---
+
+ <h1 align="center">Salesforce/SFR-Embedding-Mistral</h1>
+
+ **SFR-Embedding by Salesforce Research.**
+
+ The model is trained on top of [E5-mistral-7b-instruct](https://huggingface.co/intfloat/e5-mistral-7b-instruct) and [Mistral-7B-v0.1](https://huggingface.co/mistralai/Mistral-7B-v0.1).
+
+ This project is for research purposes only. Third-party datasets may be subject to additional terms and conditions under their associated licenses. Please refer to the specific papers for more details:
+ - [MTEB benchmark](https://arxiv.org/abs/2210.07316)
+ - [Mistral](https://arxiv.org/abs/2310.06825)
+ - [E5-mistral-7b-instruct](https://arxiv.org/pdf/2401.00368.pdf)
+
+ More technical details will be shared later.
+
+ ## How to run
+
+ ### Transformers
+ The model can be used as follows:
+ ```python
+ import torch
+ import torch.nn.functional as F
+ from torch import Tensor
+ from transformers import AutoTokenizer, AutoModel
+
+ def last_token_pool(last_hidden_states: Tensor,
+                     attention_mask: Tensor) -> Tensor:
+     # Pool by taking the hidden state of each sequence's last non-padding token.
+     left_padding = (attention_mask[:, -1].sum() == attention_mask.shape[0])
+     if left_padding:
+         return last_hidden_states[:, -1]
+     else:
+         sequence_lengths = attention_mask.sum(dim=1) - 1
+         batch_size = last_hidden_states.shape[0]
+         return last_hidden_states[torch.arange(batch_size, device=last_hidden_states.device), sequence_lengths]
+
+ def get_detailed_instruct(task_description: str, query: str) -> str:
+     return f'Instruct: {task_description}\nQuery: {query}'
+
+ # Each query must come with a one-sentence instruction that describes the task
+ task = 'Given a web search query, retrieve relevant passages that answer the query'
+ queries = [
+     get_detailed_instruct(task, 'How to bake a chocolate cake'),
+     get_detailed_instruct(task, 'Symptoms of the flu')
+ ]
+ # No need to add instruction for retrieval documents
+ passages = [
+     "To bake a delicious chocolate cake, you'll need the following ingredients: all-purpose flour, sugar, cocoa powder, baking powder, baking soda, salt, eggs, milk, vegetable oil, and vanilla extract. Start by preheating your oven to 350°F (175°C). In a mixing bowl, combine the dry ingredients (flour, sugar, cocoa powder, baking powder, baking soda, and salt). In a separate bowl, whisk together the wet ingredients (eggs, milk, vegetable oil, and vanilla extract). Gradually add the wet mixture to the dry ingredients, stirring until well combined. Pour the batter into a greased cake pan and bake for 30-35 minutes. Let it cool before frosting with your favorite chocolate frosting. Enjoy your homemade chocolate cake!",
+     "The flu, or influenza, is an illness caused by influenza viruses. Common symptoms of the flu include a high fever, chills, cough, sore throat, runny or stuffy nose, body aches, headache, fatigue, and sometimes nausea and vomiting. These symptoms can come on suddenly and are usually more severe than the common cold. It's important to get plenty of rest, stay hydrated, and consult a healthcare professional if you suspect you have the flu. In some cases, antiviral medications can help alleviate symptoms and reduce the duration of the illness."
+ ]
+
+ # load model and tokenizer
+ tokenizer = AutoTokenizer.from_pretrained('Salesforce/SFR-Embedding-Mistral')
+ model = AutoModel.from_pretrained('Salesforce/SFR-Embedding-Mistral')
+
+ # get the embeddings
+ max_length = 4096
+ input_texts = queries + passages
+ batch_dict = tokenizer(input_texts, max_length=max_length, padding=True, truncation=True, return_tensors="pt")
+ outputs = model(**batch_dict)
+ embeddings = last_token_pool(outputs.last_hidden_state, batch_dict['attention_mask'])
+
+ # normalize embeddings
+ embeddings = F.normalize(embeddings, p=2, dim=1)
+ scores = (embeddings[:2] @ embeddings[2:].T) * 100
+ print(scores.tolist())
+ # [[86.7153549194336, 36.64569091796875], [35.00493621826172, 82.0738525390625]]
+ ```
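+
+ A 7B model in full float32 precision needs roughly 28 GB of memory, so for GPU inference it is common to load the weights in half precision instead. A minimal sketch, assuming a CUDA device with enough VRAM (`torch_dtype` and `device_map` are standard `transformers` loading options; `device_map="auto"` additionally requires the `accelerate` package):
+ ```python
+ import torch
+ from transformers import AutoTokenizer, AutoModel
+
+ tokenizer = AutoTokenizer.from_pretrained('Salesforce/SFR-Embedding-Mistral')
+ # Load the weights in float16 and let accelerate place them on available devices.
+ model = AutoModel.from_pretrained(
+     'Salesforce/SFR-Embedding-Mistral',
+     torch_dtype=torch.float16,
+     device_map='auto',
+ )
+ # Remember to move the tokenized inputs to the model's device before the forward pass.
+ ```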
+
+ ### Sentence Transformers
+ ```python
+ from sentence_transformers import SentenceTransformer, util
+
+ model = SentenceTransformer("Salesforce/SFR-Embedding-Mistral")
+
+ def get_detailed_instruct(task_description: str, query: str) -> str:
+     return f'Instruct: {task_description}\nQuery: {query}'
+
+ # Each query must come with a one-sentence instruction that describes the task
+ task = 'Given a web search query, retrieve relevant passages that answer the query'
+ queries = [
+     get_detailed_instruct(task, 'How to bake a chocolate cake'),
+     get_detailed_instruct(task, 'Symptoms of the flu')
+ ]
+ # No need to add instruction for retrieval documents
+ passages = [
+     "To bake a delicious chocolate cake, you'll need the following ingredients: all-purpose flour, sugar, cocoa powder, baking powder, baking soda, salt, eggs, milk, vegetable oil, and vanilla extract. Start by preheating your oven to 350°F (175°C). In a mixing bowl, combine the dry ingredients (flour, sugar, cocoa powder, baking powder, baking soda, and salt). In a separate bowl, whisk together the wet ingredients (eggs, milk, vegetable oil, and vanilla extract). Gradually add the wet mixture to the dry ingredients, stirring until well combined. Pour the batter into a greased cake pan and bake for 30-35 minutes. Let it cool before frosting with your favorite chocolate frosting. Enjoy your homemade chocolate cake!",
+     "The flu, or influenza, is an illness caused by influenza viruses. Common symptoms of the flu include a high fever, chills, cough, sore throat, runny or stuffy nose, body aches, headache, fatigue, and sometimes nausea and vomiting. These symptoms can come on suddenly and are usually more severe than the common cold. It's important to get plenty of rest, stay hydrated, and consult a healthcare professional if you suspect you have the flu. In some cases, antiviral medications can help alleviate symptoms and reduce the duration of the illness."
+ ]
+
+ embeddings = model.encode(queries + passages)
+ scores = util.cos_sim(embeddings[:2], embeddings[2:]) * 100
+ print(scores.tolist())
+ # [[86.71537780761719, 36.645721435546875], [35.00497055053711, 82.07388305664062]]
+ ```
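+
+ Because the embeddings are compared with cosine similarity, the same scores can be obtained by normalizing at encode time and taking a plain dot product. A small sketch continuing from the snippet above (`normalize_embeddings` is a standard `SentenceTransformer.encode` flag):
+ ```python
+ # Cosine similarity equals a dot product once the embeddings are unit-normalized.
+ embeddings = model.encode(queries + passages, normalize_embeddings=True)
+ scores = (embeddings[:2] @ embeddings[2:].T) * 100
+ print(scores.tolist())
+ ```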
+
+ ### MTEB Benchmark Evaluation
+ Check out [unilm/e5](https://github.com/microsoft/unilm/tree/master/e5) to reproduce evaluation results on the [BEIR](https://arxiv.org/abs/2104.08663) and [MTEB](https://arxiv.org/abs/2210.07316) benchmarks.
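+
+ Alternatively, the [`mteb`](https://github.com/embeddings-benchmark/mteb) package can drive the evaluation directly. A minimal sketch using its `MTEB` runner (the task choice and output folder here are illustrative):
+ ```python
+ from mteb import MTEB
+ from sentence_transformers import SentenceTransformer
+
+ model = SentenceTransformer("Salesforce/SFR-Embedding-Mistral")
+ # Run a single task; pass more task names to cover a larger slice of MTEB.
+ evaluation = MTEB(tasks=["STS12"])
+ evaluation.run(model, output_folder="results/SFR-Embedding-Mistral")
+ ```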
+
+ SFR-Embedding Team (∗ indicates lead contributors):
+ * Rui Meng*
+ * Ye Liu*
+ * Shafiq Rayhan Joty
+ * Caiming Xiong
+ * Yingbo Zhou
+ * Semih Yavuz
+
+ ### Citation
+ ```bibtex
+ @misc{SFRAIResearch2024,
+   title={SFR-Embedding-Mistral: Enhance Text Retrieval with Transfer Learning},
+   author={Rui Meng and Ye Liu and Shafiq Rayhan Joty and Caiming Xiong and Yingbo Zhou and Semih Yavuz},
+   howpublished={Salesforce AI Research Blog},
+   year={2024},
+   url={https://blog.salesforceairesearch.com/sfr-embedded-mistral/}
+ }
+ ```
cache/models--Salesforce--SFR-Embedding-Mistral/refs/main ADDED
@@ -0,0 +1 @@
+ 938c560d1c236aa563b2dbdf084f28ab28bccb11
cache/models--Salesforce--SFR-Embedding-Mistral/snapshots/938c560d1c236aa563b2dbdf084f28ab28bccb11/README.md ADDED
@@ -0,0 +1 @@
+ ../../blobs/feb95adc7e79e878999ba5a1d3ddfe9f16eff0f1
cache/models--Salesforce--SFR-Embedding-Mistral/snapshots/938c560d1c236aa563b2dbdf084f28ab28bccb11/config.json ADDED
@@ -0,0 +1 @@
+ ../../blobs/c19160bba3c1267f959caf6d13fb07f9ea232e04
cache/models--Salesforce--SFR-Embedding-Mistral/snapshots/938c560d1c236aa563b2dbdf084f28ab28bccb11/config_sentence_transformers.json ADDED
@@ -0,0 +1 @@
+ ../../blobs/ef62bf21fb2396937098b86ae80c68813b229c18
cache/models--Salesforce--SFR-Embedding-Mistral/snapshots/938c560d1c236aa563b2dbdf084f28ab28bccb11/model.safetensors.index.json ADDED
@@ -0,0 +1 @@
+ ../../blobs/f8194e4e9432d287bf257d4a7d4a0f2446c32da8
cache/models--Salesforce--SFR-Embedding-Mistral/snapshots/938c560d1c236aa563b2dbdf084f28ab28bccb11/modules.json ADDED
@@ -0,0 +1 @@
+ ../../blobs/f7640f94e81bb7f4f04daf1668850b38763a13d9
cache/models--Salesforce--SFR-Embedding-Mistral/snapshots/938c560d1c236aa563b2dbdf084f28ab28bccb11/sentence_bert_config.json ADDED
@@ -0,0 +1 @@
+ ../../blobs/42dcdfcaf9e42a488d4be06500dd771d7aa11e83
docker-compose.yml ADDED
@@ -0,0 +1,61 @@
+ version: "3.5"
+
+ networks:
+   metavoice-net:
+     driver: bridge
+
+ volumes:
+   hf-cache:
+     driver: local
+
+ x-common-settings: &common-settings
+   volumes:
+     - hf-cache:/.hf-cache
+     - ./assets:/app/assets
+   deploy:
+     replicas: 1
+     resources:
+       reservations:
+         devices:
+           - driver: nvidia
+             count: 1
+             capabilities: [ gpu ]
+   runtime: nvidia
+   ipc: host
+   tty: true # enable colorized logs
+   build:
+     context: .
+   image: metavoice-server:latest
+   networks:
+     - metavoice-net
+   environment:
+     - NVIDIA_VISIBLE_DEVICES=all
+     - HF_HOME=/.hf-cache
+   logging:
+     options:
+       max-size: "100m"
+       max-file: "10"
+
+ services:
+   server:
+     <<: *common-settings
+     container_name: metavoice-server
+     command: [ "--port=58004" ]
+     ports:
+       - 58004:58004
+     healthcheck:
+       test: [ "CMD", "curl", "http://metavoice-server:58004/health" ]
+       interval: 1m
+       timeout: 10s
+       retries: 20
+   ui:
+     <<: *common-settings
+     container_name: metavoice-ui
+     entrypoint: [ "python3.10", "app.py" ]
+     ports:
+       - 7861:7861
+     healthcheck:
+       test: [ "CMD", "curl", "http://localhost:7861" ]
+       interval: 1m
+       timeout: 10s
+       retries: 1
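
Usage sketch (assuming Docker with the NVIDIA container runtime installed): `docker compose up --build server` builds the image and exposes the API on port 58004, while `docker compose up --build ui` serves `app.py` on port 7861; the healthchecks poll `http://metavoice-server:58004/health` and the UI root, respectively.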
emo-knob-teaser-1.svg ADDED
fam/__init__.py ADDED
File without changes
fam/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (149 Bytes). View file
 
fam/__pycache__/__init__.cpython-39.pyc ADDED
Binary file (145 Bytes). View file
 
fam/llm/__init__.py ADDED
File without changes
fam/llm/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (153 Bytes). View file
 
fam/llm/__pycache__/__init__.cpython-39.pyc ADDED
Binary file (149 Bytes). View file
 
fam/llm/__pycache__/decoders.cpython-310.pyc ADDED
Binary file (3.52 kB). View file
 
fam/llm/__pycache__/decoders.cpython-39.pyc ADDED
Binary file (3.49 kB). View file
 
fam/llm/__pycache__/enhancers.cpython-310.pyc ADDED
Binary file (3.64 kB). View file
 
fam/llm/__pycache__/enhancers.cpython-39.pyc ADDED
Binary file (3.62 kB). View file
 
fam/llm/__pycache__/fast_inference.cpython-310.pyc ADDED
Binary file (4.53 kB). View file
 
fam/llm/__pycache__/fast_inference.cpython-39.pyc ADDED
Binary file (4.51 kB). View file
 
fam/llm/__pycache__/fast_inference_utils.cpython-310.pyc ADDED
Binary file (9.71 kB). View file
 
fam/llm/__pycache__/fast_inference_utils.cpython-39.pyc ADDED
Binary file (9.64 kB). View file
 
fam/llm/__pycache__/fast_model.cpython-310.pyc ADDED
Binary file (9.15 kB). View file
 
fam/llm/__pycache__/fast_model.cpython-39.pyc ADDED
Binary file (9.14 kB). View file
 
fam/llm/__pycache__/inference.cpython-310.pyc ADDED
Binary file (15.7 kB). View file
 
fam/llm/__pycache__/inference.cpython-39.pyc ADDED
Binary file (15.6 kB). View file
 
fam/llm/__pycache__/model.cpython-310.pyc ADDED
Binary file (12.9 kB). View file
 
fam/llm/__pycache__/model.cpython-39.pyc ADDED
Binary file (12.9 kB). View file
 
fam/llm/__pycache__/utils.cpython-310.pyc ADDED
Binary file (2.51 kB). View file