Tom Aarsen committed on
Commit
b97fd4d
1 Parent(s): 78dcd97

Adding Sentence Transformers files & update README

Browse files
1_Pooling/config.json CHANGED
@@ -3,5 +3,8 @@
3
  "pooling_mode_cls_token": true,
4
  "pooling_mode_mean_tokens": false,
5
  "pooling_mode_max_tokens": false,
6
- "pooling_mode_mean_sqrt_len_tokens": false
 
 
 
7
  }
 
3
  "pooling_mode_cls_token": true,
4
  "pooling_mode_mean_tokens": false,
5
  "pooling_mode_max_tokens": false,
6
+ "pooling_mode_mean_sqrt_len_tokens": false,
7
+ "pooling_mode_weightedmean_tokens": false,
8
+ "pooling_mode_lasttoken": false,
9
+ "include_prompt": true
10
  }
README.md CHANGED
@@ -2636,9 +2636,7 @@ from sentence_transformers import models, SentenceTransformer
2636
  from sentence_transformers.util import cos_sim
2637
 
2638
  # 1. load model with `cls` pooling
2639
- word_embedding_model = models.Transformer("mixedbread-ai/mxbai-embed-2d-large-v1")
2640
- pooling_model = models.Pooling(word_embedding_model.get_word_embedding_dimension(), pooling_mode="cls")
2641
- model = SentenceTransformer(modules=[word_embedding_model, pooling_model])
2642
 
2643
  # 2. set adaptive layer and embedding size.
2644
  # it is recommended to set layers from 20 to 24.
 
2636
  from sentence_transformers.util import cos_sim
2637
 
2638
  # 1. load model with `cls` pooling
2639
+ model = SentenceTransformer("mixedbread-ai/mxbai-embed-2d-large-v1")
 
 
2640
 
2641
  # 2. set adaptive layer and embedding size.
2642
  # it is recommended to set layers from 20 to 24.
config_sentence_transformers.json ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "__version__": {
3
+ "sentence_transformers": "2.5.1",
4
+ "transformers": "4.37.0",
5
+ "pytorch": "2.1.0+cu121"
6
+ },
7
+ "prompts": {},
8
+ "default_prompt_name": null
9
+ }
modules.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ {
3
+ "idx": 0,
4
+ "name": "0",
5
+ "path": "",
6
+ "type": "sentence_transformers.models.Transformer"
7
+ },
8
+ {
9
+ "idx": 1,
10
+ "name": "1",
11
+ "path": "1_Pooling",
12
+ "type": "sentence_transformers.models.Pooling"
13
+ }
14
+ ]
sentence_bert_config.json ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ {
2
+ "max_seq_length": 512,
3
+ "do_lower_case": false
4
+ }