BlackBeenie
commited on
Add new SentenceTransformer model
Browse files- .gitattributes +1 -0
- 0_Transformer/config.json +67 -0
- 0_Transformer/model.safetensors +3 -0
- 0_Transformer/sentence_bert_config.json +4 -0
- 0_Transformer/special_tokens_map.json +51 -0
- 0_Transformer/tokenizer.json +3 -0
- 0_Transformer/tokenizer_config.json +61 -0
- 1_Pooling/config.json +10 -0
- README.md +739 -0
- config_sentence_transformers.json +16 -0
- modules.json +20 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
36 |
+
0_Transformer/tokenizer.json filter=lfs diff=lfs merge=lfs -text
|
0_Transformer/config.json
ADDED
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"_name_or_path": "/content/drive/MyDrive/Research/AIRetriever/bpr_output/jinaai/jina-embeddings-v3-v3-msmarco/0_Transformer",
|
3 |
+
"architectures": [
|
4 |
+
"XLMRobertaModel"
|
5 |
+
],
|
6 |
+
"attention_probs_dropout_prob": 0.1,
|
7 |
+
"auto_map": {
|
8 |
+
"AutoConfig": "jinaai/xlm-roberta-flash-implementation--configuration_xlm_roberta.XLMRobertaFlashConfig",
|
9 |
+
"AutoModel": "jinaai/xlm-roberta-flash-implementation--modeling_lora.XLMRobertaLoRA",
|
10 |
+
"AutoModelForMaskedLM": "jinaai/xlm-roberta-flash-implementation--modeling_xlm_roberta.XLMRobertaForMaskedLM",
|
11 |
+
"AutoModelForPreTraining": "jinaai/xlm-roberta-flash-implementation--modeling_xlm_roberta.XLMRobertaForPreTraining"
|
12 |
+
},
|
13 |
+
"bos_token_id": 0,
|
14 |
+
"classifier_dropout": null,
|
15 |
+
"emb_pooler": null,
|
16 |
+
"eos_token_id": 2,
|
17 |
+
"hidden_act": "gelu",
|
18 |
+
"hidden_dropout_prob": 0.1,
|
19 |
+
"hidden_size": 1024,
|
20 |
+
"initializer_range": 0.02,
|
21 |
+
"intermediate_size": 4096,
|
22 |
+
"layer_norm_eps": 1e-05,
|
23 |
+
"load_trained_adapters": true,
|
24 |
+
"lora_adaptations": [
|
25 |
+
"retrieval.query",
|
26 |
+
"retrieval.passage",
|
27 |
+
"separation",
|
28 |
+
"classification",
|
29 |
+
"text-matching"
|
30 |
+
],
|
31 |
+
"lora_alpha": 1,
|
32 |
+
"lora_dropout_p": 0.0,
|
33 |
+
"lora_main_params_trainable": false,
|
34 |
+
"lora_rank": 4,
|
35 |
+
"matryoshka_dimensions": [
|
36 |
+
32,
|
37 |
+
64,
|
38 |
+
128,
|
39 |
+
256,
|
40 |
+
512,
|
41 |
+
768,
|
42 |
+
1024
|
43 |
+
],
|
44 |
+
"max_position_embeddings": 8194,
|
45 |
+
"model_type": "xlm-roberta",
|
46 |
+
"num_attention_heads": 16,
|
47 |
+
"num_hidden_layers": 24,
|
48 |
+
"output_past": true,
|
49 |
+
"pad_token_id": 1,
|
50 |
+
"position_embedding_type": "rotary",
|
51 |
+
"rotary_emb_base": 20000.0,
|
52 |
+
"task_instructions": {
|
53 |
+
"classification": "",
|
54 |
+
"retrieval.passage": "Represent the document for retrieval: ",
|
55 |
+
"retrieval.query": "Represent the query for retrieving evidence documents: ",
|
56 |
+
"separation": "",
|
57 |
+
"text-matching": ""
|
58 |
+
},
|
59 |
+
"torch_dtype": "float32",
|
60 |
+
"transformers_version": "4.44.2",
|
61 |
+
"truncate_dim": null,
|
62 |
+
"type_vocab_size": 1,
|
63 |
+
"use_cache": true,
|
64 |
+
"use_flash_attn": true,
|
65 |
+
"use_reentrant": false,
|
66 |
+
"vocab_size": 250002
|
67 |
+
}
|
0_Transformer/model.safetensors
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:517d8a4c1839ac77a78ff7b0143c326ef1dd0d2dce1ebf51564a8084661d2dfd
|
3 |
+
size 2271064456
|
0_Transformer/sentence_bert_config.json
ADDED
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"max_seq_length": 8194,
|
3 |
+
"do_lower_case": false
|
4 |
+
}
|
0_Transformer/special_tokens_map.json
ADDED
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"bos_token": {
|
3 |
+
"content": "<s>",
|
4 |
+
"lstrip": false,
|
5 |
+
"normalized": false,
|
6 |
+
"rstrip": false,
|
7 |
+
"single_word": false
|
8 |
+
},
|
9 |
+
"cls_token": {
|
10 |
+
"content": "<s>",
|
11 |
+
"lstrip": false,
|
12 |
+
"normalized": false,
|
13 |
+
"rstrip": false,
|
14 |
+
"single_word": false
|
15 |
+
},
|
16 |
+
"eos_token": {
|
17 |
+
"content": "</s>",
|
18 |
+
"lstrip": false,
|
19 |
+
"normalized": false,
|
20 |
+
"rstrip": false,
|
21 |
+
"single_word": false
|
22 |
+
},
|
23 |
+
"mask_token": {
|
24 |
+
"content": "<mask>",
|
25 |
+
"lstrip": true,
|
26 |
+
"normalized": false,
|
27 |
+
"rstrip": false,
|
28 |
+
"single_word": false
|
29 |
+
},
|
30 |
+
"pad_token": {
|
31 |
+
"content": "<pad>",
|
32 |
+
"lstrip": false,
|
33 |
+
"normalized": false,
|
34 |
+
"rstrip": false,
|
35 |
+
"single_word": false
|
36 |
+
},
|
37 |
+
"sep_token": {
|
38 |
+
"content": "</s>",
|
39 |
+
"lstrip": false,
|
40 |
+
"normalized": false,
|
41 |
+
"rstrip": false,
|
42 |
+
"single_word": false
|
43 |
+
},
|
44 |
+
"unk_token": {
|
45 |
+
"content": "<unk>",
|
46 |
+
"lstrip": false,
|
47 |
+
"normalized": false,
|
48 |
+
"rstrip": false,
|
49 |
+
"single_word": false
|
50 |
+
}
|
51 |
+
}
|
0_Transformer/tokenizer.json
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:3e19cd8c08f528b481e909f73dbd1fd62b1e8b1117579ba205e477801237f9e0
|
3 |
+
size 17082988
|
0_Transformer/tokenizer_config.json
ADDED
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"added_tokens_decoder": {
|
3 |
+
"0": {
|
4 |
+
"content": "<s>",
|
5 |
+
"lstrip": false,
|
6 |
+
"normalized": false,
|
7 |
+
"rstrip": false,
|
8 |
+
"single_word": false,
|
9 |
+
"special": true
|
10 |
+
},
|
11 |
+
"1": {
|
12 |
+
"content": "<pad>",
|
13 |
+
"lstrip": false,
|
14 |
+
"normalized": false,
|
15 |
+
"rstrip": false,
|
16 |
+
"single_word": false,
|
17 |
+
"special": true
|
18 |
+
},
|
19 |
+
"2": {
|
20 |
+
"content": "</s>",
|
21 |
+
"lstrip": false,
|
22 |
+
"normalized": false,
|
23 |
+
"rstrip": false,
|
24 |
+
"single_word": false,
|
25 |
+
"special": true
|
26 |
+
},
|
27 |
+
"3": {
|
28 |
+
"content": "<unk>",
|
29 |
+
"lstrip": false,
|
30 |
+
"normalized": false,
|
31 |
+
"rstrip": false,
|
32 |
+
"single_word": false,
|
33 |
+
"special": true
|
34 |
+
},
|
35 |
+
"250001": {
|
36 |
+
"content": "<mask>",
|
37 |
+
"lstrip": true,
|
38 |
+
"normalized": false,
|
39 |
+
"rstrip": false,
|
40 |
+
"single_word": false,
|
41 |
+
"special": true
|
42 |
+
}
|
43 |
+
},
|
44 |
+
"bos_token": "<s>",
|
45 |
+
"clean_up_tokenization_spaces": true,
|
46 |
+
"cls_token": "<s>",
|
47 |
+
"eos_token": "</s>",
|
48 |
+
"mask_token": "<mask>",
|
49 |
+
"max_length": 8194,
|
50 |
+
"model_max_length": 8194,
|
51 |
+
"pad_to_multiple_of": null,
|
52 |
+
"pad_token": "<pad>",
|
53 |
+
"pad_token_type_id": 0,
|
54 |
+
"padding_side": "right",
|
55 |
+
"sep_token": "</s>",
|
56 |
+
"stride": 0,
|
57 |
+
"tokenizer_class": "XLMRobertaTokenizer",
|
58 |
+
"truncation_side": "right",
|
59 |
+
"truncation_strategy": "longest_first",
|
60 |
+
"unk_token": "<unk>"
|
61 |
+
}
|
1_Pooling/config.json
ADDED
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"word_embedding_dimension": 1024,
|
3 |
+
"pooling_mode_cls_token": false,
|
4 |
+
"pooling_mode_mean_tokens": true,
|
5 |
+
"pooling_mode_max_tokens": false,
|
6 |
+
"pooling_mode_mean_sqrt_len_tokens": false,
|
7 |
+
"pooling_mode_weightedmean_tokens": false,
|
8 |
+
"pooling_mode_lasttoken": false,
|
9 |
+
"include_prompt": true
|
10 |
+
}
|
README.md
ADDED
@@ -0,0 +1,739 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
---
|
2 |
+
base_model: jinaai/jina-embeddings-v3
|
3 |
+
library_name: sentence-transformers
|
4 |
+
pipeline_tag: sentence-similarity
|
5 |
+
tags:
|
6 |
+
- sentence-transformers
|
7 |
+
- sentence-similarity
|
8 |
+
- feature-extraction
|
9 |
+
- generated_from_trainer
|
10 |
+
- dataset_size:498970
|
11 |
+
- loss:BPRLoss
|
12 |
+
widget:
|
13 |
+
- source_sentence: meaning of the prefix em
|
14 |
+
sentences:
|
15 |
+
- Word Origin and History for em- Expand. from French assimilation of en- to following
|
16 |
+
labial (see en- (1)). Also a prefix used to form verbs from adjectives and nouns.
|
17 |
+
representing Latin ex- assimilated to following -m- (see ex-).
|
18 |
+
- 'Hawaii: Aloha! Whether you are hoping to travel to Hawaii for a tropical green
|
19 |
+
Christmas or you are hoping to make this island paradise your home, we can help
|
20 |
+
you find the information you need! The state of Hawaii, located in the middle
|
21 |
+
of the Pacific Ocean, is farther away from any other landmass than any other island
|
22 |
+
on the earth.'
|
23 |
+
- 'Prefixes: Un, Dis, Im, Mis. A prefix is placed at the beginning of a word to
|
24 |
+
change its meaning. For example, the suffix re- means either again or back as
|
25 |
+
in return, repeat or refurbish. The following 4 prefixes are easy to confuse because
|
26 |
+
they all have a negative meaning. un-.'
|
27 |
+
- source_sentence: how long does engine take to cool down
|
28 |
+
sentences:
|
29 |
+
- It takes roughly 30 minutes for the laptop to cool down to a normal state.Or if
|
30 |
+
you want to use it soon it could take I guess 10-15 minutes.
|
31 |
+
- "Turn off the engine. If you can pop the hood from the driverâ\x80\x99s seat,\
|
32 |
+
\ do so â\x80\x94 but donâ\x80\x99t risk opening it by hand until the engine has\
|
33 |
+
\ cooled, especially if you see steam wafting off the engine. It typically takes\
|
34 |
+
\ a solid 30 minutes for an engine to cool down enough for it to be safe to handle."
|
35 |
+
- Zeppelin was invented in 1900 by a military officer of German origin named Count
|
36 |
+
Ferdinand von Zeppelin.It was a stiff framed airship, LZ-I that flew on 2nd July,
|
37 |
+
1900 carrying five passengers near Lake Constance in Germany. Zeppelins were used
|
38 |
+
in the times of peace as well as war.eppelin was invented in 1900 by a military
|
39 |
+
officer of German origin named Count Ferdinand von Zeppelin.
|
40 |
+
- source_sentence: how long does it take to get an undergraduate
|
41 |
+
sentences:
|
42 |
+
- How Long Does It Take To Become a Nurse Anesthetist (CRNA)? How Long Does It Take
|
43 |
+
To Become a Nurse Practitioner? How Long Does It Take To Become a Nutritionist?
|
44 |
+
How Long Does It Take To Become A Pharmacist? How Long Does It Take To Become
|
45 |
+
a Physician Assistant? How Long Does It Take To Become a Social Worker? (ANSWERED)
|
46 |
+
How Long Does It Take To Become a Vet Tech? How Long Does It Take To Become An
|
47 |
+
LPN? How Long Does It Take To Become an OB/GYN? How Long Does It Take To Become
|
48 |
+
an Ultrasound Technician? How Long Does It Take To Get a Medical Degree? How Long
|
49 |
+
Does It Take To Get a Nursing Degree? Your first stepping stone toward a rewarding
|
50 |
+
nursing career is completing the education and becoming registered. Ill answer
|
51 |
+
the age old question about how long it takes to get a registered nursing degree.
|
52 |
+
- A depositary receipt (DR) is a type of negotiable (transferable) financial security
|
53 |
+
that is traded on a local stock exchange but represents a security, usually in
|
54 |
+
the form of equity, that is issued by a foreign publicly listed company. U.S.
|
55 |
+
broker may also sell ADRs back into the local Russian market. This is known as
|
56 |
+
cross-border trading. When this happens, an amount of ADRs is canceled by the
|
57 |
+
depository and the local shares are released from the custodian bank and delivered
|
58 |
+
back to the Russian broker who bought them.
|
59 |
+
- Undergraduate Studies. To become a doctor, a student must first complete high
|
60 |
+
school, then go on to college. During the typical four-year undergraduate period,
|
61 |
+
the aspiring doctor will study topics such as anatomy, physiology, biology, chemistry
|
62 |
+
and other college courses necessary for a degree, such as English or math.
|
63 |
+
- source_sentence: fees definition
|
64 |
+
sentences:
|
65 |
+
- fees. 1 veterinarians' charges rendered to clients for services. 2 Justifiable
|
66 |
+
professional fees are based on the amount of time spent on the case, with a varying
|
67 |
+
fee per hour depending on the difficulty and complexity of the problem, and on
|
68 |
+
the specialist superiority of the veterinarian.
|
69 |
+
- 'Summary: The Catbird Seat by James Thurber is about Mr. Martin who has decided
|
70 |
+
he must kill Mrs Barrows because she is destroying the firm he works for, but
|
71 |
+
in the end he tricks his boss into thinking she has had a mental breakdown.'
|
72 |
+
- Cost, in common usage, the monetary value of goods and services that producers
|
73 |
+
and consumers purchase. In a basic economic sense, cost is the measure of the
|
74 |
+
alternative opportunities foregone in the choice of one good or activity over
|
75 |
+
others.
|
76 |
+
- source_sentence: what is a fermentation lock used for
|
77 |
+
sentences:
|
78 |
+
- "Remember, fermentation is a method of preserving food. Leaving it on your counter\
|
79 |
+
\ gives it more time for the LAB activity to increase â\x80\x94 which, in turn,\
|
80 |
+
\ lowers pH â\x80\x94 and prevents spoilage. As long as your jar can keep out\
|
81 |
+
\ the oxygen, you shouldnâ\x80\x99t be worried. Which leads me toâ\x80¦."
|
82 |
+
- The fermentation lock or airlock is a device used in beer brewing and wine making
|
83 |
+
that allows carbon dioxide released by the beer to escape the fermenter, while
|
84 |
+
not allowing air to enter the fermenter, thus avoiding oxidation. There are two
|
85 |
+
main designs for the fermentation lock, or airlock.
|
86 |
+
- The New River is formed by the confluence of the South Fork New River and the
|
87 |
+
North Fork New River in Ashe County, North Carolina. It then flows north into
|
88 |
+
southwestern Virginia, passing near Galax, Virginia and through a gorge in the
|
89 |
+
Iron Mountains. Continuing north, the river enters Pulaski County, Virginia, where
|
90 |
+
it is impounded by Claytor Dam, creating Claytor Lake.
|
91 |
+
---
|
92 |
+
|
93 |
+
# SentenceTransformer based on jinaai/jina-embeddings-v3
|
94 |
+
|
95 |
+
This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [jinaai/jina-embeddings-v3](https://huggingface.co/jinaai/jina-embeddings-v3). It maps sentences & paragraphs to a 1024-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
|
96 |
+
|
97 |
+
## Model Details
|
98 |
+
|
99 |
+
### Model Description
|
100 |
+
- **Model Type:** Sentence Transformer
|
101 |
+
- **Base model:** [jinaai/jina-embeddings-v3](https://huggingface.co/jinaai/jina-embeddings-v3) <!-- at revision 4be32c2f5d65b95e4bcce473545b7883ec8d2edd -->
|
102 |
+
- **Maximum Sequence Length:** 8194 tokens
|
103 |
+
- **Output Dimensionality:** 1024 tokens
|
104 |
+
- **Similarity Function:** Cosine Similarity
|
105 |
+
<!-- - **Training Dataset:** Unknown -->
|
106 |
+
<!-- - **Language:** Unknown -->
|
107 |
+
<!-- - **License:** Unknown -->
|
108 |
+
|
109 |
+
### Model Sources
|
110 |
+
|
111 |
+
- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
|
112 |
+
- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
|
113 |
+
- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)
|
114 |
+
|
115 |
+
### Full Model Architecture
|
116 |
+
|
117 |
+
```
|
118 |
+
SentenceTransformer(
|
119 |
+
(transformer): Transformer(
|
120 |
+
(auto_model): XLMRobertaLoRA(
|
121 |
+
(roberta): XLMRobertaModel(
|
122 |
+
(embeddings): XLMRobertaEmbeddings(
|
123 |
+
(word_embeddings): ParametrizedEmbedding(
|
124 |
+
250002, 1024, padding_idx=1
|
125 |
+
(parametrizations): ModuleDict(
|
126 |
+
(weight): ParametrizationList(
|
127 |
+
(0): LoRAParametrization()
|
128 |
+
)
|
129 |
+
)
|
130 |
+
)
|
131 |
+
(token_type_embeddings): ParametrizedEmbedding(
|
132 |
+
1, 1024
|
133 |
+
(parametrizations): ModuleDict(
|
134 |
+
(weight): ParametrizationList(
|
135 |
+
(0): LoRAParametrization()
|
136 |
+
)
|
137 |
+
)
|
138 |
+
)
|
139 |
+
)
|
140 |
+
(emb_drop): Dropout(p=0.1, inplace=False)
|
141 |
+
(emb_ln): LayerNorm((1024,), eps=1e-05, elementwise_affine=True)
|
142 |
+
(encoder): XLMRobertaEncoder(
|
143 |
+
(layers): ModuleList(
|
144 |
+
(0-23): 24 x Block(
|
145 |
+
(mixer): MHA(
|
146 |
+
(rotary_emb): RotaryEmbedding()
|
147 |
+
(Wqkv): ParametrizedLinearResidual(
|
148 |
+
in_features=1024, out_features=3072, bias=True
|
149 |
+
(parametrizations): ModuleDict(
|
150 |
+
(weight): ParametrizationList(
|
151 |
+
(0): LoRAParametrization()
|
152 |
+
)
|
153 |
+
)
|
154 |
+
)
|
155 |
+
(inner_attn): FlashSelfAttention(
|
156 |
+
(drop): Dropout(p=0.1, inplace=False)
|
157 |
+
)
|
158 |
+
(inner_cross_attn): FlashCrossAttention(
|
159 |
+
(drop): Dropout(p=0.1, inplace=False)
|
160 |
+
)
|
161 |
+
(out_proj): ParametrizedLinear(
|
162 |
+
in_features=1024, out_features=1024, bias=True
|
163 |
+
(parametrizations): ModuleDict(
|
164 |
+
(weight): ParametrizationList(
|
165 |
+
(0): LoRAParametrization()
|
166 |
+
)
|
167 |
+
)
|
168 |
+
)
|
169 |
+
)
|
170 |
+
(dropout1): Dropout(p=0.1, inplace=False)
|
171 |
+
(drop_path1): StochasticDepth(p=0.0, mode=row)
|
172 |
+
(norm1): LayerNorm((1024,), eps=1e-05, elementwise_affine=True)
|
173 |
+
(mlp): Mlp(
|
174 |
+
(fc1): ParametrizedLinear(
|
175 |
+
in_features=1024, out_features=4096, bias=True
|
176 |
+
(parametrizations): ModuleDict(
|
177 |
+
(weight): ParametrizationList(
|
178 |
+
(0): LoRAParametrization()
|
179 |
+
)
|
180 |
+
)
|
181 |
+
)
|
182 |
+
(fc2): ParametrizedLinear(
|
183 |
+
in_features=4096, out_features=1024, bias=True
|
184 |
+
(parametrizations): ModuleDict(
|
185 |
+
(weight): ParametrizationList(
|
186 |
+
(0): LoRAParametrization()
|
187 |
+
)
|
188 |
+
)
|
189 |
+
)
|
190 |
+
)
|
191 |
+
(dropout2): Dropout(p=0.1, inplace=False)
|
192 |
+
(drop_path2): StochasticDepth(p=0.0, mode=row)
|
193 |
+
(norm2): LayerNorm((1024,), eps=1e-05, elementwise_affine=True)
|
194 |
+
)
|
195 |
+
)
|
196 |
+
)
|
197 |
+
(pooler): XLMRobertaPooler(
|
198 |
+
(dense): ParametrizedLinear(
|
199 |
+
in_features=1024, out_features=1024, bias=True
|
200 |
+
(parametrizations): ModuleDict(
|
201 |
+
(weight): ParametrizationList(
|
202 |
+
(0): LoRAParametrization()
|
203 |
+
)
|
204 |
+
)
|
205 |
+
)
|
206 |
+
(activation): Tanh()
|
207 |
+
)
|
208 |
+
)
|
209 |
+
)
|
210 |
+
)
|
211 |
+
(pooler): Pooling({'word_embedding_dimension': 1024, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
|
212 |
+
(normalizer): Normalize()
|
213 |
+
)
|
214 |
+
```
|
215 |
+
|
216 |
+
## Usage
|
217 |
+
|
218 |
+
### Direct Usage (Sentence Transformers)
|
219 |
+
|
220 |
+
First install the Sentence Transformers library:
|
221 |
+
|
222 |
+
```bash
|
223 |
+
pip install -U sentence-transformers
|
224 |
+
```
|
225 |
+
|
226 |
+
Then you can load this model and run inference.
|
227 |
+
```python
|
228 |
+
from sentence_transformers import SentenceTransformer
|
229 |
+
|
230 |
+
# Download from the 🤗 Hub
|
231 |
+
model = SentenceTransformer("BlackBeenie/jina-embeddings-v3-msmarco-v3-bpr")
|
232 |
+
# Run inference
|
233 |
+
sentences = [
|
234 |
+
'what is a fermentation lock used for',
|
235 |
+
'The fermentation lock or airlock is a device used in beer brewing and wine making that allows carbon dioxide released by the beer to escape the fermenter, while not allowing air to enter the fermenter, thus avoiding oxidation. There are two main designs for the fermentation lock, or airlock.',
|
236 |
+
'Remember, fermentation is a method of preserving food. Leaving it on your counter gives it more time for the LAB activity to increase — which, in turn, lowers pH — and prevents spoilage. As long as your jar can keep out the oxygen, you shouldn’t be worried. Which leads me to….',
|
237 |
+
]
|
238 |
+
embeddings = model.encode(sentences)
|
239 |
+
print(embeddings.shape)
|
240 |
+
# [3, 1024]
|
241 |
+
|
242 |
+
# Get the similarity scores for the embeddings
|
243 |
+
similarities = model.similarity(embeddings, embeddings)
|
244 |
+
print(similarities.shape)
|
245 |
+
# [3, 3]
|
246 |
+
```
|
247 |
+
|
248 |
+
<!--
|
249 |
+
### Direct Usage (Transformers)
|
250 |
+
|
251 |
+
<details><summary>Click to see the direct usage in Transformers</summary>
|
252 |
+
|
253 |
+
</details>
|
254 |
+
-->
|
255 |
+
|
256 |
+
<!--
|
257 |
+
### Downstream Usage (Sentence Transformers)
|
258 |
+
|
259 |
+
You can finetune this model on your own dataset.
|
260 |
+
|
261 |
+
<details><summary>Click to expand</summary>
|
262 |
+
|
263 |
+
</details>
|
264 |
+
-->
|
265 |
+
|
266 |
+
<!--
|
267 |
+
### Out-of-Scope Use
|
268 |
+
|
269 |
+
*List how the model may foreseeably be misused and address what users ought not to do with the model.*
|
270 |
+
-->
|
271 |
+
|
272 |
+
<!--
|
273 |
+
## Bias, Risks and Limitations
|
274 |
+
|
275 |
+
*What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.*
|
276 |
+
-->
|
277 |
+
|
278 |
+
<!--
|
279 |
+
### Recommendations
|
280 |
+
|
281 |
+
*What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.*
|
282 |
+
-->
|
283 |
+
|
284 |
+
## Training Details
|
285 |
+
|
286 |
+
### Training Dataset
|
287 |
+
|
288 |
+
#### Unnamed Dataset
|
289 |
+
|
290 |
+
|
291 |
+
* Size: 498,970 training samples
|
292 |
+
* Columns: <code>sentence_0</code>, <code>sentence_1</code>, and <code>sentence_2</code>
|
293 |
+
* Approximate statistics based on the first 1000 samples:
|
294 |
+
| | sentence_0 | sentence_1 | sentence_2 |
|
295 |
+
|:--------|:---------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|
|
296 |
+
| type | string | string | string |
|
297 |
+
| details | <ul><li>min: 4 tokens</li><li>mean: 9.93 tokens</li><li>max: 37 tokens</li></ul> | <ul><li>min: 17 tokens</li><li>mean: 90.01 tokens</li><li>max: 239 tokens</li></ul> | <ul><li>min: 23 tokens</li><li>mean: 88.24 tokens</li><li>max: 258 tokens</li></ul> |
|
298 |
+
* Samples:
|
299 |
+
| sentence_0 | sentence_1 | sentence_2 |
|
300 |
+
|:-------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
301 |
+
| <code>how much does it cost to paint a interior house</code> | <code>Interior House Painting Cost Factors. Generally, it will take a minimum of two gallons of paint to cover a room. At the highest end, paint will cost anywhere between $30 and $60 per gallon and come in three different finishes: flat, semi-gloss or high-gloss.Flat finishes are the least shiny and are best suited for areas requiring frequent cleaning.rovide a few details about your project and receive competitive quotes from local pros. The average national cost to paint a home interior is $1,671, with most homeowners spending between $966 and $2,426.</code> | <code>Question DetailsAsked on 3/12/2014. Guest_... How much does it cost per square foot to paint the interior of a house? We just bought roughly a 1500 sg ft townhouse and want to get the entire house, including ceilings painted (including a roughly 400 sq ft finished basement not included in square footage).</code> |
|
302 |
+
| <code>when is s corp taxes due</code> | <code>If you form a corporate entity for your small business, regardless of whether it's taxed as a C or S corporation, a tax return must be filed with the Internal Revenue Service on its due date each year. Corporate tax returns are always due on the 15th day of the third month following the close of the tax year. The actual day that the tax return filing deadline falls on, however, isn't the same for every corporation.</code> | <code>Before Jan. 1, 2026 After Dec. 31, 2025 Starting with 2016 tax returns, all. other C corps besides Dec. 31 and. June 30 year-ends (including those with. other fiscal year-ends) will be due on. the 15th of the 4th month after the.</code> |
|
303 |
+
| <code>what are disaccharides</code> | <code>Disaccharides are formed when two monosaccharides are joined together and a molecule of water is removed, a process known as dehydration reaction. For example; milk sugar (lactose) is made from glucose and galactose whereas the sugar from sugar cane and sugar beets (sucrose) is made from glucose and fructose.altose, another notable disaccharide, is made up of two glucose molecules. The two monosaccharides are bonded via a dehydration reaction (also called a condensation reaction or dehydration synthesis) that leads to the loss of a molecule of water and formation of a glycosidic bond.</code> | <code>Disaccharides- Another type of carbohydrate. How many sugar units are disaccharides composed of?_____ What elements make up disaccharides? _____ How does the body use disaccharides? _____ There is no chemical test for disaccharides. Table sugar (white granulated sugar) is an example of a disaccharide. List some foods that contain a lot of disaccharides: _____</code> |
|
304 |
+
* Loss: <code>beir.losses.bpr_loss.BPRLoss</code>
|
305 |
+
|
306 |
+
### Training Hyperparameters
|
307 |
+
#### Non-Default Hyperparameters
|
308 |
+
|
309 |
+
- `eval_strategy`: steps
|
310 |
+
- `per_device_train_batch_size`: 32
|
311 |
+
- `per_device_eval_batch_size`: 32
|
312 |
+
- `num_train_epochs`: 8
|
313 |
+
- `multi_dataset_batch_sampler`: round_robin
|
314 |
+
|
315 |
+
#### All Hyperparameters
|
316 |
+
<details><summary>Click to expand</summary>
|
317 |
+
|
318 |
+
- `overwrite_output_dir`: False
|
319 |
+
- `do_predict`: False
|
320 |
+
- `eval_strategy`: steps
|
321 |
+
- `prediction_loss_only`: True
|
322 |
+
- `per_device_train_batch_size`: 32
|
323 |
+
- `per_device_eval_batch_size`: 32
|
324 |
+
- `per_gpu_train_batch_size`: None
|
325 |
+
- `per_gpu_eval_batch_size`: None
|
326 |
+
- `gradient_accumulation_steps`: 1
|
327 |
+
- `eval_accumulation_steps`: None
|
328 |
+
- `torch_empty_cache_steps`: None
|
329 |
+
- `learning_rate`: 5e-05
|
330 |
+
- `weight_decay`: 0.0
|
331 |
+
- `adam_beta1`: 0.9
|
332 |
+
- `adam_beta2`: 0.999
|
333 |
+
- `adam_epsilon`: 1e-08
|
334 |
+
- `max_grad_norm`: 1
|
335 |
+
- `num_train_epochs`: 8
|
336 |
+
- `max_steps`: -1
|
337 |
+
- `lr_scheduler_type`: linear
|
338 |
+
- `lr_scheduler_kwargs`: {}
|
339 |
+
- `warmup_ratio`: 0.0
|
340 |
+
- `warmup_steps`: 0
|
341 |
+
- `log_level`: passive
|
342 |
+
- `log_level_replica`: warning
|
343 |
+
- `log_on_each_node`: True
|
344 |
+
- `logging_nan_inf_filter`: True
|
345 |
+
- `save_safetensors`: True
|
346 |
+
- `save_on_each_node`: False
|
347 |
+
- `save_only_model`: False
|
348 |
+
- `restore_callback_states_from_checkpoint`: False
|
349 |
+
- `no_cuda`: False
|
350 |
+
- `use_cpu`: False
|
351 |
+
- `use_mps_device`: False
|
352 |
+
- `seed`: 42
|
353 |
+
- `data_seed`: None
|
354 |
+
- `jit_mode_eval`: False
|
355 |
+
- `use_ipex`: False
|
356 |
+
- `bf16`: False
|
357 |
+
- `fp16`: False
|
358 |
+
- `fp16_opt_level`: O1
|
359 |
+
- `half_precision_backend`: auto
|
360 |
+
- `bf16_full_eval`: False
|
361 |
+
- `fp16_full_eval`: False
|
362 |
+
- `tf32`: None
|
363 |
+
- `local_rank`: 0
|
364 |
+
- `ddp_backend`: None
|
365 |
+
- `tpu_num_cores`: None
|
366 |
+
- `tpu_metrics_debug`: False
|
367 |
+
- `debug`: []
|
368 |
+
- `dataloader_drop_last`: False
|
369 |
+
- `dataloader_num_workers`: 0
|
370 |
+
- `dataloader_prefetch_factor`: None
|
371 |
+
- `past_index`: -1
|
372 |
+
- `disable_tqdm`: False
|
373 |
+
- `remove_unused_columns`: True
|
374 |
+
- `label_names`: None
|
375 |
+
- `load_best_model_at_end`: False
|
376 |
+
- `ignore_data_skip`: False
|
377 |
+
- `fsdp`: []
|
378 |
+
- `fsdp_min_num_params`: 0
|
379 |
+
- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
|
380 |
+
- `fsdp_transformer_layer_cls_to_wrap`: None
|
381 |
+
- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
|
382 |
+
- `deepspeed`: None
|
383 |
+
- `label_smoothing_factor`: 0.0
|
384 |
+
- `optim`: adamw_torch
|
385 |
+
- `optim_args`: None
|
386 |
+
- `adafactor`: False
|
387 |
+
- `group_by_length`: False
|
388 |
+
- `length_column_name`: length
|
389 |
+
- `ddp_find_unused_parameters`: None
|
390 |
+
- `ddp_bucket_cap_mb`: None
|
391 |
+
- `ddp_broadcast_buffers`: False
|
392 |
+
- `dataloader_pin_memory`: True
|
393 |
+
- `dataloader_persistent_workers`: False
|
394 |
+
- `skip_memory_metrics`: True
|
395 |
+
- `use_legacy_prediction_loop`: False
|
396 |
+
- `push_to_hub`: False
|
397 |
+
- `resume_from_checkpoint`: None
|
398 |
+
- `hub_model_id`: None
|
399 |
+
- `hub_strategy`: every_save
|
400 |
+
- `hub_private_repo`: False
|
401 |
+
- `hub_always_push`: False
|
402 |
+
- `gradient_checkpointing`: False
|
403 |
+
- `gradient_checkpointing_kwargs`: None
|
404 |
+
- `include_inputs_for_metrics`: False
|
405 |
+
- `eval_do_concat_batches`: True
|
406 |
+
- `fp16_backend`: auto
|
407 |
+
- `push_to_hub_model_id`: None
|
408 |
+
- `push_to_hub_organization`: None
|
409 |
+
- `mp_parameters`:
|
410 |
+
- `auto_find_batch_size`: False
|
411 |
+
- `full_determinism`: False
|
412 |
+
- `torchdynamo`: None
|
413 |
+
- `ray_scope`: last
|
414 |
+
- `ddp_timeout`: 1800
|
415 |
+
- `torch_compile`: False
|
416 |
+
- `torch_compile_backend`: None
|
417 |
+
- `torch_compile_mode`: None
|
418 |
+
- `dispatch_batches`: None
|
419 |
+
- `split_batches`: None
|
420 |
+
- `include_tokens_per_second`: False
|
421 |
+
- `include_num_input_tokens_seen`: False
|
422 |
+
- `neftune_noise_alpha`: None
|
423 |
+
- `optim_target_modules`: None
|
424 |
+
- `batch_eval_metrics`: False
|
425 |
+
- `eval_on_start`: False
|
426 |
+
- `eval_use_gather_object`: False
|
427 |
+
- `batch_sampler`: batch_sampler
|
428 |
+
- `multi_dataset_batch_sampler`: round_robin
|
429 |
+
|
430 |
+
</details>
|
431 |
+
|
432 |
+
### Training Logs
|
433 |
+
<details><summary>Click to expand</summary>
|
434 |
+
|
435 |
+
| Epoch | Step | Training Loss |
|
436 |
+
|:------:|:------:|:-------------:|
|
437 |
+
| 0.0321 | 500 | 1.7204 |
|
438 |
+
| 0.0641 | 1000 | 0.6847 |
|
439 |
+
| 0.0962 | 1500 | 0.4782 |
|
440 |
+
| 0.1283 | 2000 | 0.4001 |
|
441 |
+
| 0.1603 | 2500 | 0.3773 |
|
442 |
+
| 0.1924 | 3000 | 0.3538 |
|
443 |
+
| 0.2245 | 3500 | 0.3424 |
|
444 |
+
| 0.2565 | 4000 | 0.3375 |
|
445 |
+
| 0.2886 | 4500 | 0.3286 |
|
446 |
+
| 0.3207 | 5000 | 0.3289 |
|
447 |
+
| 0.3527 | 5500 | 0.3266 |
|
448 |
+
| 0.3848 | 6000 | 0.3226 |
|
449 |
+
| 0.4169 | 6500 | 0.3266 |
|
450 |
+
| 0.4489 | 7000 | 0.3262 |
|
451 |
+
| 0.4810 | 7500 | 0.3241 |
|
452 |
+
| 0.5131 | 8000 | 0.3216 |
|
453 |
+
| 0.5451 | 8500 | 0.3232 |
|
454 |
+
| 0.5772 | 9000 | 0.3186 |
|
455 |
+
| 0.6092 | 9500 | 0.3194 |
|
456 |
+
| 0.6413 | 10000 | 0.314 |
|
457 |
+
| 0.6734 | 10500 | 0.3217 |
|
458 |
+
| 0.7054 | 11000 | 0.3156 |
|
459 |
+
| 0.7375 | 11500 | 0.3244 |
|
460 |
+
| 0.7696 | 12000 | 0.3189 |
|
461 |
+
| 0.8016 | 12500 | 0.3235 |
|
462 |
+
| 0.8337 | 13000 | 0.3305 |
|
463 |
+
| 0.8658 | 13500 | 0.3284 |
|
464 |
+
| 0.8978 | 14000 | 0.3213 |
|
465 |
+
| 0.9299 | 14500 | 0.3283 |
|
466 |
+
| 0.9620 | 15000 | 0.3219 |
|
467 |
+
| 0.9940 | 15500 | 0.3247 |
|
468 |
+
| 1.0 | 15593 | - |
|
469 |
+
| 1.0261 | 16000 | 0.3287 |
|
470 |
+
| 1.0582 | 16500 | 0.3346 |
|
471 |
+
| 1.0902 | 17000 | 0.3245 |
|
472 |
+
| 1.1223 | 17500 | 0.3202 |
|
473 |
+
| 1.1544 | 18000 | 0.332 |
|
474 |
+
| 1.1864 | 18500 | 0.3298 |
|
475 |
+
| 1.2185 | 19000 | 0.332 |
|
476 |
+
| 1.2506 | 19500 | 0.3258 |
|
477 |
+
| 1.2826 | 20000 | 0.3291 |
|
478 |
+
| 1.3147 | 20500 | 0.334 |
|
479 |
+
| 1.3468 | 21000 | 0.3328 |
|
480 |
+
| 1.3788 | 21500 | 0.3362 |
|
481 |
+
| 1.4109 | 22000 | 0.3348 |
|
482 |
+
| 1.4430 | 22500 | 0.3402 |
|
483 |
+
| 1.4750 | 23000 | 0.3346 |
|
484 |
+
| 1.5071 | 23500 | 0.339 |
|
485 |
+
| 1.5392 | 24000 | 0.3406 |
|
486 |
+
| 1.5712 | 24500 | 0.3239 |
|
487 |
+
| 1.6033 | 25000 | 0.3275 |
|
488 |
+
| 1.6353 | 25500 | 0.3287 |
|
489 |
+
| 1.6674 | 26000 | 0.3271 |
|
490 |
+
| 1.6995 | 26500 | 0.3337 |
|
491 |
+
| 1.7315 | 27000 | 0.3352 |
|
492 |
+
| 1.7636 | 27500 | 0.3244 |
|
493 |
+
| 1.7957 | 28000 | 0.3418 |
|
494 |
+
| 1.8277 | 28500 | 0.349 |
|
495 |
+
| 1.8598 | 29000 | 0.3395 |
|
496 |
+
| 1.8919 | 29500 | 0.3386 |
|
497 |
+
| 1.9239 | 30000 | 0.3379 |
|
498 |
+
| 1.9560 | 30500 | 0.3412 |
|
499 |
+
| 1.9881 | 31000 | 0.3364 |
|
500 |
+
| 2.0 | 31186 | - |
|
501 |
+
| 2.0201 | 31500 | 0.3386 |
|
502 |
+
| 2.0522 | 32000 | 0.3417 |
|
503 |
+
| 2.0843 | 32500 | 0.3362 |
|
504 |
+
| 2.1163 | 33000 | 0.3251 |
|
505 |
+
| 2.1484 | 33500 | 0.3563 |
|
506 |
+
| 2.1805 | 34000 | 0.3341 |
|
507 |
+
| 2.2125 | 34500 | 0.3478 |
|
508 |
+
| 2.2446 | 35000 | 0.3389 |
|
509 |
+
| 2.2767 | 35500 | 0.342 |
|
510 |
+
| 2.3087 | 36000 | 0.3467 |
|
511 |
+
| 2.3408 | 36500 | 0.3419 |
|
512 |
+
| 2.3729 | 37000 | 0.3513 |
|
513 |
+
| 2.4049 | 37500 | 0.3441 |
|
514 |
+
| 2.4370 | 38000 | 0.3484 |
|
515 |
+
| 2.4691 | 38500 | 0.3457 |
|
516 |
+
| 2.5011 | 39000 | 0.3503 |
|
517 |
+
| 2.5332 | 39500 | 0.3446 |
|
518 |
+
| 2.5653 | 40000 | 0.3461 |
|
519 |
+
| 2.5973 | 40500 | 0.3399 |
|
520 |
+
| 2.6294 | 41000 | 0.3405 |
|
521 |
+
| 2.6615 | 41500 | 0.3382 |
|
522 |
+
| 2.6935 | 42000 | 0.3388 |
|
523 |
+
| 2.7256 | 42500 | 0.3378 |
|
524 |
+
| 2.7576 | 43000 | 0.336 |
|
525 |
+
| 2.7897 | 43500 | 0.3471 |
|
526 |
+
| 2.8218 | 44000 | 0.3563 |
|
527 |
+
| 2.8538 | 44500 | 0.3465 |
|
528 |
+
| 2.8859 | 45000 | 0.3501 |
|
529 |
+
| 2.9180 | 45500 | 0.3439 |
|
530 |
+
| 2.9500 | 46000 | 0.3546 |
|
531 |
+
| 2.9821 | 46500 | 0.3414 |
|
532 |
+
| 3.0 | 46779 | - |
|
533 |
+
| 3.0142 | 47000 | 0.3498 |
|
534 |
+
| 3.0462 | 47500 | 0.3484 |
|
535 |
+
| 3.0783 | 48000 | 0.3496 |
|
536 |
+
| 3.1104 | 48500 | 0.3392 |
|
537 |
+
| 3.1424 | 49000 | 0.3583 |
|
538 |
+
| 3.1745 | 49500 | 0.3505 |
|
539 |
+
| 3.2066 | 50000 | 0.3547 |
|
540 |
+
| 3.2386 | 50500 | 0.3469 |
|
541 |
+
| 3.2707 | 51000 | 0.3489 |
|
542 |
+
| 3.3028 | 51500 | 0.3473 |
|
543 |
+
| 3.3348 | 52000 | 0.3579 |
|
544 |
+
| 3.3669 | 52500 | 0.3523 |
|
545 |
+
| 3.3990 | 53000 | 0.3427 |
|
546 |
+
| 3.4310 | 53500 | 0.3685 |
|
547 |
+
| 3.4631 | 54000 | 0.3479 |
|
548 |
+
| 3.4952 | 54500 | 0.355 |
|
549 |
+
| 3.5272 | 55000 | 0.3464 |
|
550 |
+
| 3.5593 | 55500 | 0.3473 |
|
551 |
+
| 3.5914 | 56000 | 0.348 |
|
552 |
+
| 3.6234 | 56500 | 0.3426 |
|
553 |
+
| 3.6555 | 57000 | 0.3394 |
|
554 |
+
| 3.6876 | 57500 | 0.3454 |
|
555 |
+
| 3.7196 | 58000 | 0.345 |
|
556 |
+
| 3.7517 | 58500 | 0.3411 |
|
557 |
+
| 3.7837 | 59000 | 0.3557 |
|
558 |
+
| 3.8158 | 59500 | 0.3505 |
|
559 |
+
| 3.8479 | 60000 | 0.3605 |
|
560 |
+
| 3.8799 | 60500 | 0.3554 |
|
561 |
+
| 3.9120 | 61000 | 0.349 |
|
562 |
+
| 3.9441 | 61500 | 0.3629 |
|
563 |
+
| 3.9761 | 62000 | 0.3456 |
|
564 |
+
| 4.0 | 62372 | - |
|
565 |
+
| 4.0082 | 62500 | 0.3562 |
|
566 |
+
| 4.0403 | 63000 | 0.3531 |
|
567 |
+
| 4.0723 | 63500 | 0.3569 |
|
568 |
+
| 4.1044 | 64000 | 0.3494 |
|
569 |
+
| 4.1365 | 64500 | 0.3513 |
|
570 |
+
| 4.1685 | 65000 | 0.3599 |
|
571 |
+
| 4.2006 | 65500 | 0.3487 |
|
572 |
+
| 4.2327 | 66000 | 0.3561 |
|
573 |
+
| 4.2647 | 66500 | 0.3583 |
|
574 |
+
| 4.2968 | 67000 | 0.3539 |
|
575 |
+
| 4.3289 | 67500 | 0.3614 |
|
576 |
+
| 4.3609 | 68000 | 0.3558 |
|
577 |
+
| 4.3930 | 68500 | 0.3485 |
|
578 |
+
| 4.4251 | 69000 | 0.3715 |
|
579 |
+
| 4.4571 | 69500 | 0.3585 |
|
580 |
+
| 4.4892 | 70000 | 0.3571 |
|
581 |
+
| 4.5213 | 70500 | 0.3498 |
|
582 |
+
| 4.5533 | 71000 | 0.3576 |
|
583 |
+
| 4.5854 | 71500 | 0.3498 |
|
584 |
+
| 4.6175 | 72000 | 0.3507 |
|
585 |
+
| 4.6495 | 72500 | 0.3436 |
|
586 |
+
| 4.6816 | 73000 | 0.3461 |
|
587 |
+
| 4.7137 | 73500 | 0.3451 |
|
588 |
+
| 4.7457 | 74000 | 0.3554 |
|
589 |
+
| 4.7778 | 74500 | 0.354 |
|
590 |
+
| 4.8099 | 75000 | 0.3514 |
|
591 |
+
| 4.8419 | 75500 | 0.3688 |
|
592 |
+
| 4.8740 | 76000 | 0.3573 |
|
593 |
+
| 4.9060 | 76500 | 0.3557 |
|
594 |
+
| 4.9381 | 77000 | 0.3607 |
|
595 |
+
| 4.9702 | 77500 | 0.3488 |
|
596 |
+
| 5.0 | 77965 | - |
|
597 |
+
| 5.0022 | 78000 | 0.3555 |
|
598 |
+
| 5.0343 | 78500 | 0.3596 |
|
599 |
+
| 5.0664 | 79000 | 0.3572 |
|
600 |
+
| 5.0984 | 79500 | 0.355 |
|
601 |
+
| 5.1305 | 80000 | 0.3427 |
|
602 |
+
| 5.1626 | 80500 | 0.3669 |
|
603 |
+
| 5.1946 | 81000 | 0.3578 |
|
604 |
+
| 5.2267 | 81500 | 0.3589 |
|
605 |
+
| 5.2588 | 82000 | 0.3586 |
|
606 |
+
| 5.2908 | 82500 | 0.3581 |
|
607 |
+
| 5.3229 | 83000 | 0.3607 |
|
608 |
+
| 5.3550 | 83500 | 0.3563 |
|
609 |
+
| 5.3870 | 84000 | 0.3597 |
|
610 |
+
| 5.4191 | 84500 | 0.3712 |
|
611 |
+
| 5.4512 | 85000 | 0.3574 |
|
612 |
+
| 5.4832 | 85500 | 0.359 |
|
613 |
+
| 5.5153 | 86000 | 0.3598 |
|
614 |
+
| 5.5474 | 86500 | 0.3604 |
|
615 |
+
| 5.5794 | 87000 | 0.3535 |
|
616 |
+
| 5.6115 | 87500 | 0.3606 |
|
617 |
+
| 5.6436 | 88000 | 0.3469 |
|
618 |
+
| 5.6756 | 88500 | 0.3568 |
|
619 |
+
| 5.7077 | 89000 | 0.3497 |
|
620 |
+
| 5.7398 | 89500 | 0.3597 |
|
621 |
+
| 5.7718 | 90000 | 0.3582 |
|
622 |
+
| 5.8039 | 90500 | 0.3556 |
|
623 |
+
| 5.8360 | 91000 | 0.3716 |
|
624 |
+
| 5.8680 | 91500 | 0.3615 |
|
625 |
+
| 5.9001 | 92000 | 0.3532 |
|
626 |
+
| 5.9321 | 92500 | 0.3747 |
|
627 |
+
| 5.9642 | 93000 | 0.3521 |
|
628 |
+
| 5.9963 | 93500 | 0.362 |
|
629 |
+
| 6.0 | 93558 | - |
|
630 |
+
| 6.0283 | 94000 | 0.3701 |
|
631 |
+
| 6.0604 | 94500 | 0.3636 |
|
632 |
+
| 6.0925 | 95000 | 0.3556 |
|
633 |
+
| 6.1245 | 95500 | 0.3508 |
|
634 |
+
| 6.1566 | 96000 | 0.3626 |
|
635 |
+
| 6.1887 | 96500 | 0.3618 |
|
636 |
+
| 6.2207 | 97000 | 0.3683 |
|
637 |
+
| 6.2528 | 97500 | 0.362 |
|
638 |
+
| 6.2849 | 98000 | 0.3534 |
|
639 |
+
| 6.3169 | 98500 | 0.3643 |
|
640 |
+
| 6.3490 | 99000 | 0.36 |
|
641 |
+
| 6.3811 | 99500 | 0.3592 |
|
642 |
+
| 6.4131 | 100000 | 0.3606 |
|
643 |
+
| 6.4452 | 100500 | 0.369 |
|
644 |
+
| 6.4773 | 101000 | 0.3607 |
|
645 |
+
| 6.5093 | 101500 | 0.3683 |
|
646 |
+
| 6.5414 | 102000 | 0.3648 |
|
647 |
+
| 6.5735 | 102500 | 0.3481 |
|
648 |
+
| 6.6055 | 103000 | 0.3565 |
|
649 |
+
| 6.6376 | 103500 | 0.3555 |
|
650 |
+
| 6.6697 | 104000 | 0.347 |
|
651 |
+
| 6.7017 | 104500 | 0.3585 |
|
652 |
+
| 6.7338 | 105000 | 0.3553 |
|
653 |
+
| 6.7659 | 105500 | 0.3539 |
|
654 |
+
| 6.7979 | 106000 | 0.3638 |
|
655 |
+
| 6.8300 | 106500 | 0.3674 |
|
656 |
+
| 6.8621 | 107000 | 0.3674 |
|
657 |
+
| 6.8941 | 107500 | 0.3617 |
|
658 |
+
| 6.9262 | 108000 | 0.3655 |
|
659 |
+
| 6.9583 | 108500 | 0.3593 |
|
660 |
+
| 6.9903 | 109000 | 0.3603 |
|
661 |
+
| 7.0 | 109151 | - |
|
662 |
+
| 7.0224 | 109500 | 0.3614 |
|
663 |
+
| 7.0544 | 110000 | 0.3655 |
|
664 |
+
| 7.0865 | 110500 | 0.3597 |
|
665 |
+
| 7.1186 | 111000 | 0.3443 |
|
666 |
+
| 7.1506 | 111500 | 0.3781 |
|
667 |
+
| 7.1827 | 112000 | 0.3587 |
|
668 |
+
| 7.2148 | 112500 | 0.3676 |
|
669 |
+
| 7.2468 | 113000 | 0.357 |
|
670 |
+
| 7.2789 | 113500 | 0.3639 |
|
671 |
+
| 7.3110 | 114000 | 0.3691 |
|
672 |
+
| 7.3430 | 114500 | 0.3606 |
|
673 |
+
| 7.3751 | 115000 | 0.3679 |
|
674 |
+
| 7.4072 | 115500 | 0.3697 |
|
675 |
+
| 7.4392 | 116000 | 0.3726 |
|
676 |
+
| 7.4713 | 116500 | 0.3603 |
|
677 |
+
| 7.5034 | 117000 | 0.3655 |
|
678 |
+
| 7.5354 | 117500 | 0.3639 |
|
679 |
+
| 7.5675 | 118000 | 0.3557 |
|
680 |
+
| 7.5996 | 118500 | 0.358 |
|
681 |
+
| 7.6316 | 119000 | 0.3526 |
|
682 |
+
| 7.6637 | 119500 | 0.3579 |
|
683 |
+
| 7.6958 | 120000 | 0.3584 |
|
684 |
+
| 7.7278 | 120500 | 0.3507 |
|
685 |
+
| 7.7599 | 121000 | 0.3472 |
|
686 |
+
| 7.7920 | 121500 | 0.3757 |
|
687 |
+
| 7.8240 | 122000 | 0.3717 |
|
688 |
+
| 7.8561 | 122500 | 0.3646 |
|
689 |
+
| 7.8882 | 123000 | 0.3662 |
|
690 |
+
| 7.9202 | 123500 | 0.3668 |
|
691 |
+
| 7.9523 | 124000 | 0.3677 |
|
692 |
+
| 7.9844 | 124500 | 0.3588 |
|
693 |
+
| 8.0 | 124744 | - |
|
694 |
+
|
695 |
+
</details>
|
696 |
+
|
697 |
+
### Framework Versions
|
698 |
+
- Python: 3.10.12
|
699 |
+
- Sentence Transformers: 3.2.0
|
700 |
+
- Transformers: 4.44.2
|
701 |
+
- PyTorch: 2.4.1+cu121
|
702 |
+
- Accelerate: 0.34.2
|
703 |
+
- Datasets: 3.0.1
|
704 |
+
- Tokenizers: 0.19.1
|
705 |
+
|
706 |
+
## Citation
|
707 |
+
|
708 |
+
### BibTeX
|
709 |
+
|
710 |
+
#### Sentence Transformers
|
711 |
+
```bibtex
|
712 |
+
@inproceedings{reimers-2019-sentence-bert,
|
713 |
+
title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
|
714 |
+
author = "Reimers, Nils and Gurevych, Iryna",
|
715 |
+
booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
|
716 |
+
month = "11",
|
717 |
+
year = "2019",
|
718 |
+
publisher = "Association for Computational Linguistics",
|
719 |
+
url = "https://arxiv.org/abs/1908.10084",
|
720 |
+
}
|
721 |
+
```
|
722 |
+
|
723 |
+
<!--
|
724 |
+
## Glossary
|
725 |
+
|
726 |
+
*Clearly define terms in order to be accessible across audiences.*
|
727 |
+
-->
|
728 |
+
|
729 |
+
<!--
|
730 |
+
## Model Card Authors
|
731 |
+
|
732 |
+
*Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.*
|
733 |
+
-->
|
734 |
+
|
735 |
+
<!--
|
736 |
+
## Model Card Contact
|
737 |
+
|
738 |
+
*Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.*
|
739 |
+
-->
|
config_sentence_transformers.json
ADDED
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"__version__": {
|
3 |
+
"sentence_transformers": "3.2.0",
|
4 |
+
"transformers": "4.44.2",
|
5 |
+
"pytorch": "2.4.1+cu121"
|
6 |
+
},
|
7 |
+
"prompts": {
|
8 |
+
"retrieval.query": "Represent the query for retrieving evidence documents: ",
|
9 |
+
"retrieval.passage": "Represent the document for retrieval: ",
|
10 |
+
"separation": "",
|
11 |
+
"classification": "",
|
12 |
+
"text-matching": ""
|
13 |
+
},
|
14 |
+
"default_prompt_name": null,
|
15 |
+
"similarity_fn_name": "cosine"
|
16 |
+
}
|
modules.json
ADDED
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[
|
2 |
+
{
|
3 |
+
"idx": 0,
|
4 |
+
"name": "transformer",
|
5 |
+
"path": "0_Transformer",
|
6 |
+
"type": "custom_st.Transformer"
|
7 |
+
},
|
8 |
+
{
|
9 |
+
"idx": 1,
|
10 |
+
"name": "pooler",
|
11 |
+
"path": "1_Pooling",
|
12 |
+
"type": "sentence_transformers.models.Pooling"
|
13 |
+
},
|
14 |
+
{
|
15 |
+
"idx": 2,
|
16 |
+
"name": "normalizer",
|
17 |
+
"path": "2_Normalize",
|
18 |
+
"type": "sentence_transformers.models.Normalize"
|
19 |
+
}
|
20 |
+
]
|