manikeerthi committed
Commit 5bdf3a9
Parent(s): 96fa80b

Upload folder using huggingface_hub

1_Pooling/config.json ADDED
@@ -0,0 +1,9 @@
+ {
+   "word_embedding_dimension": 384,
+   "pooling_mode_cls_token": true,
+   "pooling_mode_mean_tokens": false,
+   "pooling_mode_max_tokens": false,
+   "pooling_mode_mean_sqrt_len_tokens": false,
+   "pooling_mode_weightedmean_tokens": false,
+   "pooling_mode_lasttoken": false
+ }
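This pooling config enables only `pooling_mode_cls_token`, so the 384-dimensional sentence embedding is taken from the first ([CLS]) token of the transformer output rather than averaged over tokens. A minimal PyTorch sketch of what that pooling step amounts to (the function name and dummy tensor below are illustrative only; the real logic lives in `sentence_transformers.models.Pooling`):

```python
import torch

def cls_pooling(last_hidden_state: torch.Tensor) -> torch.Tensor:
    """CLS pooling: keep the hidden state of the first token of each sequence.

    last_hidden_state: (batch_size, seq_len, hidden_size) tensor from the transformer.
    Returns a (batch_size, hidden_size) sentence embedding; here hidden_size == 384.
    """
    return last_hidden_state[:, 0]

# Random activations standing in for real BERT output.
dummy = torch.randn(2, 16, 384)      # 2 sentences, 16 tokens, 384 dims
print(cls_pooling(dummy).shape)      # torch.Size([2, 384])
```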
README.md ADDED
@@ -0,0 +1,92 @@
+ ---
+ library_name: sentence-transformers
+ pipeline_tag: sentence-similarity
+ tags:
+ - sentence-transformers
+ - feature-extraction
+ - sentence-similarity
+
+ ---
+
+ # {MODEL_NAME}
+
+ This is a [sentence-transformers](https://www.SBERT.net) model: it maps sentences & paragraphs to a 384-dimensional dense vector space and can be used for tasks like clustering or semantic search.
+
+ <!--- Describe your model here -->
+
+ ## Usage (Sentence-Transformers)
+
+ Using this model is straightforward once you have [sentence-transformers](https://www.SBERT.net) installed:
+
+ ```
+ pip install -U sentence-transformers
+ ```
+
+ Then you can use the model like this:
+
+ ```python
+ from sentence_transformers import SentenceTransformer
+ sentences = ["This is an example sentence", "Each sentence is converted"]
+
+ model = SentenceTransformer('{MODEL_NAME}')
+ embeddings = model.encode(sentences)
+ print(embeddings)
+ ```
+
+
+
+ ## Evaluation Results
+
+ <!--- Describe how your model was evaluated -->
+
+ For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name={MODEL_NAME})
+
+
+ ## Training
+ The model was trained with the following parameters:
+
+ **DataLoader**:
+
+ `torch.utils.data.dataloader.DataLoader` of length 99 with parameters:
+ ```
+ {'batch_size': 10, 'sampler': 'torch.utils.data.sampler.SequentialSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
+ ```
+
+ **Loss**:
+
+ `sentence_transformers.losses.MultipleNegativesRankingLoss.MultipleNegativesRankingLoss` with parameters:
+ ```
+ {'scale': 20.0, 'similarity_fct': 'cos_sim'}
+ ```
+
+ Parameters of the fit() method:
+ ```
+ {
+     "epochs": 2,
+     "evaluation_steps": 50,
+     "evaluator": "sentence_transformers.evaluation.InformationRetrievalEvaluator.InformationRetrievalEvaluator",
+     "max_grad_norm": 1,
+     "optimizer_class": "<class 'torch.optim.adamw.AdamW'>",
+     "optimizer_params": {
+         "lr": 2e-05
+     },
+     "scheduler": "WarmupLinear",
+     "steps_per_epoch": null,
+     "warmup_steps": 19,
+     "weight_decay": 0.01
+ }
+ ```
+
+
+ ## Full Model Architecture
+ ```
+ SentenceTransformer(
+   (0): Transformer({'max_seq_length': 512, 'do_lower_case': True}) with Transformer model: BertModel
+   (1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False})
+   (2): Normalize()
+ )
+ ```
+
+ ## Citing & Authors
+
+ <!--- Describe where people can find more information -->
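The card's usage snippet stops at printing raw embeddings. A small follow-on sketch of how those vectors can then be compared for semantic similarity, using the library's standard `util.cos_sim` helper; the `{MODEL_NAME}` placeholder is kept from the card and would need to be replaced with the actual repo id, and the query/document strings are made up for illustration:

```python
from sentence_transformers import SentenceTransformer, util

# Replace '{MODEL_NAME}' with the actual model id of this repository.
model = SentenceTransformer('{MODEL_NAME}')

queries = ["How do I reset my password?"]
documents = [
    "To reset your password, open Settings and choose 'Forgot password'.",
    "Our office is closed on public holidays.",
]

# Because the model ends with a Normalize() module, embeddings are unit-length,
# so cosine similarity and dot product give the same ranking.
query_emb = model.encode(queries, convert_to_tensor=True)
doc_emb = model.encode(documents, convert_to_tensor=True)

scores = util.cos_sim(query_emb, doc_emb)   # shape: (1, 2)
print(scores)
```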
config.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "_name_or_path": "BAAI/bge-small-en",
+   "architectures": [
+     "BertModel"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "classifier_dropout": null,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 384,
+   "id2label": {
+     "0": "LABEL_0"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 1536,
+   "label2id": {
+     "LABEL_0": 0
+   },
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 512,
+   "model_type": "bert",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "pad_token_id": 0,
+   "position_embedding_type": "absolute",
+   "torch_dtype": "float32",
+   "transformers_version": "4.37.2",
+   "type_vocab_size": 2,
+   "use_cache": true,
+   "vocab_size": 30522
+ }
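As a sanity check, the backbone size implied by this config is consistent with the 133,462,128-byte `model.safetensors` added below: a BERT encoder with these dimensions has roughly 33.4M float32 parameters. A back-of-the-envelope estimate (an approximation for illustration only; the grouping of terms is mine, and exact counts come from the checkpoint itself):

```python
# Rough parameter-count estimate for the BERT backbone described by config.json.
hidden = 384
layers = 12
intermediate = 1536
vocab = 30522
max_pos = 512
type_vocab = 2

# Embeddings: word + position + token-type tables, plus one LayerNorm (weight + bias).
embeddings = (vocab + max_pos + type_vocab) * hidden + 2 * hidden

# Per encoder layer: Q/K/V/output projections, feed-forward up/down, two LayerNorms.
attention = 4 * (hidden * hidden + hidden)
ffn = (hidden * intermediate + intermediate) + (intermediate * hidden + hidden)
layer_norms = 2 * 2 * hidden
per_layer = attention + ffn + layer_norms

# BertModel also ships a pooler dense layer (hidden x hidden + bias).
pooler = hidden * hidden + hidden

total = embeddings + layers * per_layer + pooler
print(f"~{total / 1e6:.1f}M parameters, ~{total * 4 / 1e6:.1f} MB as float32")
# -> ~33.4M parameters, ~133.4 MB, in line with the safetensors file size.
```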
config_sentence_transformers.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "__version__": {
+     "sentence_transformers": "2.2.2",
+     "transformers": "4.28.1",
+     "pytorch": "1.13.0+cu117"
+   }
+ }
eval/Information-Retrieval_evaluation_results.csv ADDED
@@ -0,0 +1,11 @@
+ epoch,steps,cos_sim-Accuracy@1,cos_sim-Accuracy@3,cos_sim-Accuracy@5,cos_sim-Accuracy@10,cos_sim-Precision@1,cos_sim-Recall@1,cos_sim-Precision@3,cos_sim-Recall@3,cos_sim-Precision@5,cos_sim-Recall@5,cos_sim-Precision@10,cos_sim-Recall@10,cos_sim-MRR@10,cos_sim-NDCG@10,cos_sim-MAP@100,dot_score-Accuracy@1,dot_score-Accuracy@3,dot_score-Accuracy@5,dot_score-Accuracy@10,dot_score-Precision@1,dot_score-Recall@1,dot_score-Precision@3,dot_score-Recall@3,dot_score-Precision@5,dot_score-Recall@5,dot_score-Precision@10,dot_score-Recall@10,dot_score-MRR@10,dot_score-NDCG@10,dot_score-MAP@100
+ 0,-1,0.7272727272727273,0.8636363636363636,0.9545454545454546,1.0,0.7272727272727273,0.7272727272727273,0.2878787878787878,0.8636363636363636,0.19090909090909094,0.9545454545454546,0.10000000000000003,1.0,0.8058080808080806,0.852249505206517,0.805808080808081,0.7272727272727273,0.8636363636363636,0.9545454545454546,1.0,0.7272727272727273,0.7272727272727273,0.2878787878787878,0.8636363636363636,0.19090909090909094,0.9545454545454546,0.10000000000000003,1.0,0.8058080808080806,0.852249505206517,0.805808080808081
+ 1,-1,0.6818181818181818,0.8636363636363636,0.9545454545454546,1.0,0.6818181818181818,0.6818181818181818,0.2878787878787878,0.8636363636363636,0.19090909090909094,0.9545454545454546,0.10000000000000003,1.0,0.783080808080808,0.8354735849143105,0.7830808080808083,0.6818181818181818,0.8636363636363636,0.9545454545454546,1.0,0.6818181818181818,0.6818181818181818,0.2878787878787878,0.8636363636363636,0.19090909090909094,0.9545454545454546,0.10000000000000003,1.0,0.783080808080808,0.8354735849143105,0.7830808080808083
+ 0,50,0.35365853658536583,0.7723577235772358,0.8414634146341463,0.9024390243902439,0.35365853658536583,0.17682926829268292,0.25745257452574527,0.3861788617886179,0.16829268292682925,0.42073170731707316,0.0902439024390244,0.45121951219512196,0.5791198864369597,0.40467193461960477,0.29156911996718043,0.35365853658536583,0.7723577235772358,0.8414634146341463,0.9024390243902439,0.35365853658536583,0.17682926829268292,0.25745257452574527,0.3861788617886179,0.16829268292682925,0.42073170731707316,0.0902439024390244,0.45121951219512196,0.5791198864369597,0.40467193461960477,0.29156911996718043
+ 0,-1,0.35365853658536583,0.7804878048780488,0.8577235772357723,0.9186991869918699,0.35365853658536583,0.17682926829268292,0.2601626016260163,0.3902439024390244,0.17154471544715447,0.42886178861788615,0.091869918699187,0.45934959349593496,0.5857352561620855,0.41018396863834106,0.2944554992786015,0.35365853658536583,0.7804878048780488,0.8577235772357723,0.9186991869918699,0.35365853658536583,0.17682926829268292,0.2601626016260163,0.3902439024390244,0.17154471544715447,0.42886178861788615,0.091869918699187,0.45934959349593496,0.5857352561620855,0.41018396863834106,0.2944554992786015
+ 1,50,0.34959349593495936,0.7764227642276422,0.8536585365853658,0.9227642276422764,0.34959349593495936,0.17479674796747968,0.25880758807588083,0.3882113821138211,0.17073170731707318,0.4268292682926829,0.09227642276422765,0.4613821138211382,0.5819073428829529,0.4088719332187641,0.29238317508053707,0.34959349593495936,0.7764227642276422,0.8536585365853658,0.9227642276422764,0.34959349593495936,0.17479674796747968,0.25880758807588083,0.3882113821138211,0.17073170731707318,0.4268292682926829,0.09227642276422765,0.4613821138211382,0.5819073428829529,0.4088719332187641,0.29238317508053707
+ 1,-1,0.35772357723577236,0.7845528455284553,0.8577235772357723,0.9227642276422764,0.35772357723577236,0.17886178861788618,0.2615176151761518,0.39227642276422764,0.17154471544715447,0.42886178861788615,0.09227642276422765,0.4613821138211382,0.5877064782552589,0.4115902825995784,0.2953281451780637,0.35772357723577236,0.7845528455284553,0.8577235772357723,0.9227642276422764,0.35772357723577236,0.17886178861788618,0.2615176151761518,0.39227642276422764,0.17154471544715447,0.42886178861788615,0.09227642276422765,0.4613821138211382,0.5877064782552589,0.4115902825995784,0.2953281451780637
+ 0,50,0.35365853658536583,0.7723577235772358,0.8414634146341463,0.9065040650406504,0.35365853658536583,0.17682926829268292,0.25745257452574527,0.3861788617886179,0.16829268292682925,0.42073170731707316,0.09065040650406504,0.4532520325203252,0.5791811846689897,0.40527819888955446,0.2913175110321673,0.35365853658536583,0.7723577235772358,0.8414634146341463,0.9065040650406504,0.35365853658536583,0.17682926829268292,0.25745257452574527,0.3861788617886179,0.16829268292682925,0.42073170731707316,0.09065040650406504,0.4532520325203252,0.5791811846689897,0.40527819888955446,0.2913175110321673
+ 0,-1,0.34552845528455284,0.7723577235772358,0.8577235772357723,0.9146341463414634,0.34552845528455284,0.17276422764227642,0.25745257452574527,0.3861788617886179,0.17154471544715444,0.42886178861788615,0.09146341463414634,0.4573170731707317,0.5791311782165443,0.406543818927041,0.29123770467261045,0.34552845528455284,0.7723577235772358,0.8577235772357723,0.9146341463414634,0.34552845528455284,0.17276422764227642,0.25745257452574527,0.3861788617886179,0.17154471544715444,0.42886178861788615,0.09146341463414634,0.4573170731707317,0.5791311782165443,0.406543818927041,0.29123770467261045
+ 1,50,0.34959349593495936,0.7723577235772358,0.8495934959349594,0.9186991869918699,0.34959349593495936,0.17479674796747968,0.25745257452574527,0.3861788617886179,0.16991869918699185,0.4247967479674797,0.091869918699187,0.45934959349593496,0.5821009162472577,0.4083924003073017,0.2925662078782343,0.34959349593495936,0.7723577235772358,0.8495934959349594,0.9186991869918699,0.34959349593495936,0.17479674796747968,0.25745257452574527,0.3861788617886179,0.16991869918699185,0.4247967479674797,0.091869918699187,0.45934959349593496,0.5821009162472577,0.4083924003073017,0.2925662078782343
+ 1,-1,0.34552845528455284,0.7682926829268293,0.8495934959349594,0.9186991869918699,0.34552845528455284,0.17276422764227642,0.2560975609756098,0.38414634146341464,0.16991869918699185,0.4247967479674797,0.091869918699187,0.45934959349593496,0.5783955994321849,0.4066643884523926,0.29075655615322343,0.34552845528455284,0.7682926829268293,0.8495934959349594,0.9186991869918699,0.34552845528455284,0.17276422764227642,0.2560975609756098,0.38414634146341464,0.16991869918699185,0.4247967479674797,0.091869918699187,0.45934959349593496,0.5783955994321849,0.4066643884523926,0.29075655615322343
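This CSV is written by the `InformationRetrievalEvaluator` referenced in the README: each row is one evaluation pass (epoch, step, then cosine-similarity and dot-score variants of Accuracy@k, Precision@k, Recall@k, MRR@10, NDCG@10 and MAP@100). As a rough illustration of what Accuracy@k means here, a minimal sketch with hypothetical data (the function and the toy queries/documents are mine; the evaluator itself handles the real corpus and relevance judgments):

```python
import numpy as np

def accuracy_at_k(query_embs, doc_embs, relevant_doc_ids, k=3):
    """Fraction of queries whose top-k retrieved documents contain at least one relevant doc.

    Embeddings are assumed L2-normalised (as this model's Normalize() module guarantees),
    so the dot product below equals cosine similarity.
    """
    scores = query_embs @ doc_embs.T                  # (num_queries, num_docs)
    topk = np.argsort(-scores, axis=1)[:, :k]         # indices of the k best docs per query
    hits = [len(set(row) & relevant_doc_ids[q]) > 0 for q, row in enumerate(topk)]
    return sum(hits) / len(hits)

# Tiny hypothetical example: 2 queries, 4 documents, one relevant doc per query.
rng = np.random.default_rng(0)
q = rng.normal(size=(2, 384)); q /= np.linalg.norm(q, axis=1, keepdims=True)
d = rng.normal(size=(4, 384)); d /= np.linalg.norm(d, axis=1, keepdims=True)
print(accuracy_at_k(q, d, relevant_doc_ids={0: {1}, 1: {3}}, k=3))
```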
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ceaaa7335c45f231c248c3dbb211e5fd3c2041ac10b937e0a989a07eb1f9af1e
+ size 133462128
modules.json ADDED
@@ -0,0 +1,20 @@
+ [
+   {
+     "idx": 0,
+     "name": "0",
+     "path": "",
+     "type": "sentence_transformers.models.Transformer"
+   },
+   {
+     "idx": 1,
+     "name": "1",
+     "path": "1_Pooling",
+     "type": "sentence_transformers.models.Pooling"
+   },
+   {
+     "idx": 2,
+     "name": "2",
+     "path": "2_Normalize",
+     "type": "sentence_transformers.models.Normalize"
+   }
+ ]
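modules.json wires together the three stages that the README's architecture section also lists: Transformer → Pooling (CLS) → Normalize. A hedged end-to-end sketch of the same chain using plain `transformers`, for cases where sentence-transformers is not installed; the repo id is the card's placeholder, and the truncation length is taken from sentence_bert_config.json below. This mirrors the pipeline for illustration and is not a substitute for the library's own implementation:

```python
import torch
import torch.nn.functional as F
from transformers import AutoModel, AutoTokenizer

model_id = "{MODEL_NAME}"  # placeholder: replace with the actual repo id
tokenizer = AutoTokenizer.from_pretrained(model_id)
bert = AutoModel.from_pretrained(model_id)

sentences = ["This is an example sentence", "Each sentence is converted"]
batch = tokenizer(sentences, padding=True, truncation=True, max_length=512, return_tensors="pt")

with torch.no_grad():
    out = bert(**batch)                       # (0) Transformer
cls = out.last_hidden_state[:, 0]             # (1) Pooling: CLS token only
embeddings = F.normalize(cls, p=2, dim=1)     # (2) Normalize: unit-length vectors

print(embeddings.shape)                       # torch.Size([2, 384])
```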
sentence_bert_config.json ADDED
@@ -0,0 +1,4 @@
+ {
+   "max_seq_length": 512,
+   "do_lower_case": true
+ }
special_tokens_map.json ADDED
@@ -0,0 +1,37 @@
+ {
+   "cls_token": {
+     "content": "[CLS]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "mask_token": {
+     "content": "[MASK]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "[PAD]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "sep_token": {
+     "content": "[SEP]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "[UNK]",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,57 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "[PAD]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "100": {
+       "content": "[UNK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "101": {
+       "content": "[CLS]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "102": {
+       "content": "[SEP]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "103": {
+       "content": "[MASK]",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "clean_up_tokenization_spaces": true,
+   "cls_token": "[CLS]",
+   "do_basic_tokenize": true,
+   "do_lower_case": true,
+   "mask_token": "[MASK]",
+   "model_max_length": 512,
+   "never_split": null,
+   "pad_token": "[PAD]",
+   "sep_token": "[SEP]",
+   "strip_accents": null,
+   "tokenize_chinese_chars": true,
+   "tokenizer_class": "BertTokenizer",
+   "unk_token": "[UNK]"
+ }
vocab.txt ADDED
The diff for this file is too large to render. See raw diff