Rodion committed on
Commit
55cc9f2
1 Parent(s): 4e05be7

initial commit

Browse files
0_Transformer/config.json ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "C:\\Users\\jeti8\\OneDrive - Universitaet Duisburg-Essen\\Desktop\\Implementation\\SGD-Recognizer\\src\\embedding\\..\\..\\saved_model\\sbert_trained_sdg_sim_score\\0_Transformer",
3
+ "architectures": [
4
+ "MPNetModel"
5
+ ],
6
+ "attention_probs_dropout_prob": 0.1,
7
+ "bos_token_id": 0,
8
+ "eos_token_id": 2,
9
+ "hidden_act": "gelu",
10
+ "hidden_dropout_prob": 0.1,
11
+ "hidden_size": 768,
12
+ "initializer_range": 0.02,
13
+ "intermediate_size": 3072,
14
+ "layer_norm_eps": 1e-05,
15
+ "max_position_embeddings": 514,
16
+ "model_type": "mpnet",
17
+ "num_attention_heads": 12,
18
+ "num_hidden_layers": 12,
19
+ "pad_token_id": 1,
20
+ "relative_attention_num_buckets": 32,
21
+ "transformers_version": "4.7.0",
22
+ "vocab_size": 30527
23
+ }
0_Transformer/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c4dcf04827993a27a6ea5b65f3cbc4f4d0582930200bbbe4e5915a8723880611
3
+ size 438027631
0_Transformer/sentence_bert_config.json ADDED
@@ -0,0 +1,4 @@
 
 
 
 
1
+ {
2
+ "max_seq_length": null,
3
+ "do_lower_case": false
4
+ }
0_Transformer/special_tokens_map.json ADDED
@@ -0,0 +1 @@
 
1
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "[UNK]", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}}
0_Transformer/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
0_Transformer/tokenizer_config.json ADDED
@@ -0,0 +1 @@
 
1
+ {"do_lower_case": true, "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "[UNK]", "pad_token": "<pad>", "mask_token": "<mask>", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "C:\\Users\\jeti8\\OneDrive - Universitaet Duisburg-Essen\\Desktop\\Implementation\\SGD-Recognizer\\src\\embedding\\..\\..\\saved_model\\sbert_trained_sdg_sim_score\\0_Transformer", "tokenizer_class": "MPNetTokenizer"}
0_Transformer/vocab.txt ADDED
The diff for this file is too large to render. See raw diff
1_Pooling/config.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
1
+ {
2
+ "word_embedding_dimension": 768,
3
+ "pooling_mode_cls_token": false,
4
+ "pooling_mode_mean_tokens": true,
5
+ "pooling_mode_max_tokens": false,
6
+ "pooling_mode_mean_sqrt_len_tokens": false
7
+ }
README.md CHANGED
@@ -1,3 +1,126 @@
1
  ---
2
- license: afl-3.0
 
 
 
 
 
3
  ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  ---
2
+ pipeline_tag: sentence-similarity
3
+ tags:
4
+ - sentence-transformers
5
+ - feature-extraction
6
+ - sentence-similarity
7
+ - transformers
8
  ---
9
+
10
+ # {MODEL_NAME}
11
+
12
+ This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
13
+
14
+ <!--- Describe your model here -->
15
+
16
+ ## Usage (Sentence-Transformers)
17
+
18
+ Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
19
+
20
+ ```
21
+ pip install -U sentence-transformers
22
+ ```
23
+
24
+ Then you can use the model like this:
25
+
26
+ ```python
27
+ from sentence_transformers import SentenceTransformer
28
+ sentences = ["This is an example sentence", "Each sentence is converted"]
29
+
30
+ model = SentenceTransformer('{MODEL_NAME}')
31
+ embeddings = model.encode(sentences)
32
+ print(embeddings)
33
+ ```
34
+
35
+
36
+
37
+ ## Usage (HuggingFace Transformers)
38
+ Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
39
+
40
+ ```python
41
+ from transformers import AutoTokenizer, AutoModel
42
+ import torch
43
+
44
+
45
+ #Mean Pooling - Take attention mask into account for correct averaging
46
+ def mean_pooling(model_output, attention_mask):
47
+ token_embeddings = model_output[0] #First element of model_output contains all token embeddings
48
+ input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
49
+ return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
50
+
51
+
52
+ # Sentences we want sentence embeddings for
53
+ sentences = ['This is an example sentence', 'Each sentence is converted']
54
+
55
+ # Load model from HuggingFace Hub
56
+ tokenizer = AutoTokenizer.from_pretrained('{MODEL_NAME}')
57
+ model = AutoModel.from_pretrained('{MODEL_NAME}')
58
+
59
+ # Tokenize sentences
60
+ encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
61
+
62
+ # Compute token embeddings
63
+ with torch.no_grad():
64
+ model_output = model(**encoded_input)
65
+
66
+ # Perform pooling. In this case, max pooling.
67
+ sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
68
+
69
+ print("Sentence embeddings:")
70
+ print(sentence_embeddings)
71
+ ```
72
+
73
+
74
+
75
+ ## Evaluation Results
76
+
77
+ <!--- Describe how your model was evaluated -->
78
+
79
+ For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name={MODEL_NAME})
80
+
81
+
82
+ ## Training
83
+ The model was trained with the parameters:
84
+
85
+ **DataLoader**:
86
+
87
+ `torch.utils.data.dataloader.DataLoader` of length 219 with parameters:
88
+ ```
89
+ {'batch_size': 64, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
90
+ ```
91
+
92
+ **Loss**:
93
+
94
+ `sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
95
+
96
+ Parameters of the fit()-Method:
97
+ ```
98
+ {
99
+ "callback": null,
100
+ "epochs": 2,
101
+ "evaluation_steps": 5,
102
+ "evaluator": "sentence_transformers.evaluation.EmbeddingSimilarityEvaluator.EmbeddingSimilarityEvaluator",
103
+ "max_grad_norm": 1,
104
+ "optimizer_class": "<class 'transformers.optimization.AdamW'>",
105
+ "optimizer_params": {
106
+ "lr": 2e-05
107
+ },
108
+ "scheduler": "WarmupLinear",
109
+ "steps_per_epoch": null,
110
+ "warmup_steps": 0,
111
+ "weight_decay": 0.01
112
+ }
113
+ ```
114
+
115
+
116
+ ## Full Model Architecture
117
+ ```
118
+ SentenceTransformer(
119
+ (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: MPNetModel
120
+ (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
121
+ )
122
+ ```
123
+
124
+ ## Citing & Authors
125
+
126
+ <!--- Describe where people can find more information -->
config.json ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "C:\\Users\\Anwender\\Desktop\\rodion Github\\SGD-Recognizer\\src\\embedding\\..\\..\\saved_model\\sbert_trained_sdg_sim_score\\",
3
+ "architectures": [
4
+ "MPNetModel"
5
+ ],
6
+ "attention_probs_dropout_prob": 0.1,
7
+ "bos_token_id": 0,
8
+ "eos_token_id": 2,
9
+ "hidden_act": "gelu",
10
+ "hidden_dropout_prob": 0.1,
11
+ "hidden_size": 768,
12
+ "initializer_range": 0.02,
13
+ "intermediate_size": 3072,
14
+ "layer_norm_eps": 1e-05,
15
+ "max_position_embeddings": 514,
16
+ "model_type": "mpnet",
17
+ "num_attention_heads": 12,
18
+ "num_hidden_layers": 12,
19
+ "pad_token_id": 1,
20
+ "relative_attention_num_buckets": 32,
21
+ "transformers_version": "4.8.2",
22
+ "vocab_size": 30527
23
+ }
config_sentence_transformers.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
1
+ {
2
+ "__version__": {
3
+ "sentence_transformers": "2.0.0",
4
+ "transformers": "4.8.2",
5
+ "pytorch": "1.9.0+cpu"
6
+ }
7
+ }
eval/similarity_evaluation_results.csv ADDED
@@ -0,0 +1,175 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ epoch,steps,cosine_pearson,cosine_spearman,euclidean_pearson,euclidean_spearman,manhattan_pearson,manhattan_spearman,dot_pearson,dot_spearman
2
+ 0,5,0.33827715430810007,0.33162649301326547,0.31170097226401045,0.3359935633918943,0.3146917335081728,0.338040307127236,0.33419668331545804,0.3150637690753516
3
+ 0,10,0.3725267519002703,0.3780523062147625,0.3404091374498096,0.3704666199166015,0.3438667510448096,0.37213320733577554,0.3695585645472669,0.3646734435084104
4
+ 0,15,0.37854595380082934,0.409301046380089,0.3453387892366395,0.3875204318085424,0.34938704587484504,0.389717918123264,0.37545369030013975,0.4029966845272314
5
+ 0,20,0.36426298663493467,0.41071439868691345,0.34711695522794533,0.3993236409765227,0.35139972549559745,0.4012763618903503,0.3556697401085265,0.4012693494846455
6
+ 0,25,0.39838014378209907,0.4348700850330256,0.37485554529432624,0.41782214659367883,0.3787385683484082,0.4186591210339969,0.3922323825635922,0.4280370716569509
7
+ 0,30,0.43288514577935966,0.4562865248466764,0.39982998314077134,0.43164818714191205,0.40268484705447233,0.4315856038861987,0.4309470950694133,0.4529175440775929
8
+ 0,35,0.4561598870922999,0.4637032947548486,0.4226240368283461,0.4442976136374518,0.4240495855709395,0.44288230439051174,0.45834701857265875,0.45861591634867427
9
+ 0,40,0.46535698445907475,0.4611773637841484,0.44134054814006546,0.45637332251047713,0.44152012079579933,0.45413893457164917,0.46657409003402794,0.44784460462757664
10
+ 0,45,0.4897582888034264,0.4904836314898634,0.45896175693341157,0.4803147169574416,0.45986361048676383,0.4788040700126974,0.4914529760315874,0.4803715035003189
11
+ 0,50,0.5064261077257143,0.5179410596481275,0.4651139306388286,0.49756260883790265,0.4668529575517125,0.4967989991994255,0.5090535289278771,0.5133118030032786
12
+ 0,55,0.5172686569207935,0.5318279307401947,0.47527131852574284,0.5114939503609536,0.47723145287402324,0.5108516791989761,0.5215695827739271,0.5281086240104601
13
+ 0,60,0.5238992483057145,0.5323812679322258,0.4856633311705051,0.5172561012516497,0.4872148486964394,0.5163423886875157,0.5285012309499421,0.5261398273469072
14
+ 0,65,0.5376156134920101,0.5454725017073174,0.4959411908364158,0.5280555981288877,0.49766120328205504,0.5271707451267535,0.5410825432102488,0.5401917447924686
15
+ 0,70,0.5370502119339555,0.5624124137909645,0.49463633611886637,0.5374517459343133,0.4974523797277281,0.5378258368510342,0.5424813358430995,0.5622656048096588
16
+ 0,75,0.5202722643271235,0.5511505020474577,0.4938287883696754,0.5335759886310185,0.49687416973677173,0.5345922811707924,0.5251256227091345,0.5489182661133221
17
+ 0,80,0.5566190052639798,0.571906219045338,0.5349523224701794,0.5599647206559368,0.535876036156402,0.5592665322077984,0.5616572535269524,0.565637719180788
18
+ 0,85,0.5760960041151204,0.5647742336084162,0.5518506696708262,0.5639077616197226,0.551139332847499,0.5611048950347892,0.5730962343364219,0.5521900816901449
19
+ 0,90,0.5893214857229789,0.5823044167323379,0.5535516658365095,0.5714582372215499,0.5538265303101472,0.5694362722058712,0.5868336504490638,0.5757808613201606
20
+ 0,95,0.5879534077947781,0.6003730087431444,0.5525755917640246,0.5810518468368693,0.5545160908739267,0.5810446113984302,0.5931677957788345,0.5992821211160109
21
+ 0,100,0.5682009016523614,0.5944926812997767,0.5484054820245872,0.5802960317531902,0.5509704001663784,0.5809858926914143,0.5765038018329566,0.5935591996622136
22
+ 0,105,0.58918274887187,0.6082500449463,0.5711442494714681,0.5967831656739675,0.5728310366296868,0.5965100424664412,0.5975394643174025,0.6053737732828459
23
+ 0,110,0.6159268684643914,0.6204488238860008,0.5850177575399399,0.6075174835115545,0.5861152104802931,0.6063535534441692,0.6186924060788849,0.6170379141400765
24
+ 0,-1,0.6159268684643914,0.6204488238860008,0.5850177575399399,0.6075174835115545,0.5861152104802931,0.6063535534441692,0.6186924060788849,0.6170379141400765
25
+ 1,5,0.625169486058543,0.6325477876840635,0.5850164619103004,0.6123857144595674,0.5864226669916677,0.6116349897402754,0.6269463282035955,0.6323208127198048
26
+ 1,10,0.6287021820073649,0.6408827063992498,0.5932174976713334,0.6213881150762798,0.594834874519375,0.6207292722884408,0.6338583161915043,0.6412747522807444
27
+ 1,15,0.6386076642985159,0.6464147696372525,0.6072593673163692,0.6314252342914011,0.6083814503838672,0.6299885767504135,0.6430534303929778,0.6448342230298159
28
+ 1,20,0.6489153767003253,0.6492209426880248,0.6162984599399678,0.6367126963215479,0.6168447862509078,0.6348940374424058,0.6488056686458878,0.6454147231309769
29
+ 1,25,0.6582342594043842,0.6579311743009586,0.6206848760723448,0.6415198295354692,0.6210746753187312,0.6397308955819688,0.6567801452392852,0.6557964759969428
30
+ 1,30,0.6647040557756615,0.6683092277532237,0.6309335235221593,0.651640408595049,0.6314014269578877,0.6502405426835369,0.6669007263241261,0.667267555136875
31
+ 1,35,0.6736084917668039,0.6751992472090783,0.6432519957650422,0.6619785468591803,0.6434632382537193,0.6600792393243416,0.6752370835011919,0.6726529682425525
32
+ 1,40,0.6775983160148017,0.6745685984002273,0.6516852852134529,0.6663208016321769,0.6514167872302937,0.6640457198584221,0.6772768663225481,0.6691929210677163
33
+ 1,45,0.6841203077667128,0.687302019762093,0.656290844061432,0.6746493683691108,0.6566403368495352,0.6729920142570569,0.6874061512444007,0.6848446793067314
34
+ 1,50,0.6874560909947028,0.6918573194215105,0.654033378806131,0.6754487368343861,0.6546297121946236,0.6741011775603164,0.6903566811472364,0.6911056415348329
35
+ 1,55,0.6978301339306339,0.697376754016573,0.659206517326233,0.679625675326768,0.6595751041382356,0.6780832591163434,0.6980587577299203,0.69647471584249
36
+ 1,60,0.70382401840802,0.6983843366998659,0.6704846136718265,0.6868363519952753,0.6706605649135313,0.6850189651244558,0.7025873197697637,0.6947172481435222
37
+ 1,65,0.7063806275716633,0.7051086934470093,0.676393556132251,0.6942005823190854,0.6770621321130956,0.6930346449564208,0.7078060885079005,0.7018056600610753
38
+ 1,70,0.7043459718195118,0.7110842536236537,0.672786696362691,0.6958146386527497,0.6743556481863496,0.6957100269815643,0.7088316268221827,0.7110007482987087
39
+ 1,75,0.7038707029353207,0.712197226102971,0.672317846607162,0.6961768234296601,0.6741870741826884,0.6965011701700827,0.7090507222238327,0.7125378366004604
40
+ 1,80,0.7149652690902956,0.7177958613435146,0.6831206422718593,0.7038019407429094,0.6846761700926339,0.7036773737880334,0.7184282272270067,0.7169386639286339
41
+ 1,85,0.7219689062630971,0.7210036887755895,0.6891860252229476,0.7080848016873915,0.6904824405902332,0.7076812497522196,0.7238917345322892,0.7194567250703817
42
+ 1,90,0.7279779240517772,0.7221673986631849,0.6950157206648991,0.7115716018074775,0.6959186340812952,0.7107795068287833,0.7274504932228194,0.7192977648625409
43
+ 1,95,0.7289822296511917,0.7252704842142442,0.6965650148861894,0.7137501296260392,0.6976046318051253,0.7131276639539404,0.7298823001968847,0.7232138142048875
44
+ 1,100,0.7290781083195821,0.7268086135893732,0.6970668495650522,0.7147317216871006,0.6982084981793073,0.7141610186599892,0.7307486458705007,0.7250951462063016
45
+ 1,105,0.7283218403184728,0.7274806159096822,0.6965887768593259,0.7148101367577715,0.6978484846901586,0.7143881756732531,0.7304917112515777,0.7259027261161017
46
+ 1,110,0.7282294529679018,0.7276803036279169,0.6966704502193022,0.7150108526032584,0.6979577912903807,0.7146423472996355,0.7305159914378881,0.7261895163964308
47
+ 1,-1,0.7282294529679018,0.7276803036279169,0.6966704502193022,0.7150108526032584,0.6979577912903807,0.7146423472996355,0.7305159914378881,0.7261895163964308
48
+ 0,5,0.7323451406037876,0.7173717173143852,0.6992377185678319,0.7090523739227497,0.698964615617425,0.707285219039384,0.7275095734010786,0.712419677470065
49
+ 0,10,0.7054721864931444,0.7140181303785297,0.6845372923736663,0.7020162164085757,0.6856756175758452,0.7021944358172135,0.7114018408642043,0.7136480721548426
50
+ 0,15,0.7579204969792183,0.73468363416409,0.7370141899100846,0.7334176692527481,0.7367401355031229,0.7322472606906744,0.756216811766451,0.7295635750922818
51
+ 0,20,0.7241210017527298,0.7274978082681582,0.7022796101557145,0.7136705202760094,0.7040198927911022,0.7144999342265311,0.7239324163632559,0.7257285374287195
52
+ 0,25,0.7720235175927896,0.7487599297304077,0.7457661717954595,0.7456824718526349,0.7462111363613171,0.7451118378455778,0.7686652638424926,0.7449949317201899
53
+ 0,30,0.7805342159604942,0.7648550316115822,0.7575197844966164,0.7582882354922184,0.7587100381544938,0.7584687111780887,0.7796291855214482,0.7617530925778774
54
+ 0,35,0.7910036037898863,0.7795971142092696,0.7676035333322492,0.7693351469956011,0.7687760380339465,0.769704849050138,0.7915049445887173,0.7778789237890531
55
+ 0,40,0.8047525655237959,0.7825228244148034,0.7799656394593895,0.7755583960482435,0.7802286184655927,0.7751466010757304,0.8021485720310192,0.7796928266211325
56
+ 0,45,0.7949147130711754,0.7831105584982095,0.7704168978243949,0.7696683282838754,0.7718129924435708,0.7703595497227667,0.7933520977086493,0.7814656492738826
57
+ 0,50,0.8254053747460384,0.7986298788368836,0.7942218410152884,0.7868600844662074,0.7943417443659992,0.7863635871104896,0.8191530577216722,0.7973153580995239
58
+ 0,55,0.8149328832398818,0.7998404637973843,0.8004131438863964,0.7922450793388339,0.8013157068361139,0.7923379383139819,0.8150623936820508,0.7973207420611724
59
+ 0,60,0.8296283035348725,0.8084462369284723,0.8133580832087707,0.8007128142423586,0.81378689545627,0.800375844774297,0.8287839681178104,0.8055862155486185
60
+ 0,65,0.8366283772013917,0.810253492135796,0.8083610531660367,0.7976758755868156,0.8088724692695222,0.7975395724731839,0.8308100006282911,0.8085433800155214
61
+ 0,70,0.8160877726964201,0.7990164261821163,0.7865553073393361,0.7823996932827186,0.7879527599935526,0.7827820550708801,0.8104273371182472,0.79708488241744
62
+ 0,75,0.8422573598827836,0.8078059485862753,0.8166610379314695,0.8004795801981929,0.8173373987076469,0.8004474541375295,0.8359411813499817,0.80445321832879
63
+ 0,80,0.8507651716266047,0.8125768758263923,0.8324469713589627,0.8105640665419939,0.8324761466278137,0.809928074899463,0.8468289002567332,0.808328287120721
64
+ 0,85,0.8417388591898227,0.8167766886349076,0.8237111786135424,0.8089297077221541,0.8244064148399498,0.8089305085184254,0.8406395371466512,0.8148471992697249
65
+ 0,90,0.8560702181318629,0.8219609385767405,0.8334347198935055,0.8135680066401489,0.8333754276095866,0.8129473442197109,0.8537029313948028,0.8208672988999856
66
+ 0,95,0.8594997944968105,0.8203021500729121,0.8325789064455729,0.8122486061342551,0.8322338759657167,0.8112137445434081,0.8537647725233661,0.8191038321235564
67
+ 0,100,0.8467477465663709,0.8151513043533181,0.8226320328946337,0.8058607934716683,0.8232696061338386,0.8054217200667102,0.8412470390480762,0.8128075899096726
68
+ 0,105,0.864042666316301,0.8233247516431811,0.8455148707486707,0.8198460939468177,0.8451861441348295,0.8188150178304148,0.8595887139307421,0.8196274049114395
69
+ 0,110,0.868367527361981,0.8300877021345786,0.8497361960903052,0.8234758021763786,0.8493545000359004,0.8227389558235525,0.865511928813012,0.8275011674347248
70
+ 0,-1,0.868367527361981,0.8300877021345786,0.8497361960903052,0.8234758021763786,0.8493545000359004,0.8227389558235525,0.865511928813012,0.8275011674347248
71
+ 1,5,0.871596086467681,0.8327842426192593,0.8492167746011944,0.8246970637188863,0.8488441027714716,0.8239229365994826,0.8679192330644647,0.830853166955312
72
+ 1,10,0.8695715233283839,0.8360566770189257,0.8494042787291669,0.8266559782990384,0.8496522235602951,0.8263826481250298,0.867310314438844,0.834611467022351
73
+ 1,15,0.868866318234391,0.8361045278091405,0.848806033517971,0.8261020466107131,0.8490360078678041,0.8256395108768756,0.8668342359019718,0.8349818341206786
74
+ 1,20,0.8718210968656214,0.8375398218620176,0.8519935036958353,0.8274802640615183,0.8521051977474117,0.8271729583342449,0.8697858600293671,0.8364159512907642
75
+ 1,25,0.8739372220341958,0.8339027881806652,0.8530923131761413,0.8281004870033505,0.8529031509401673,0.8275495264693222,0.8694632292469596,0.8317826460421587
76
+ 1,30,0.8763492216542826,0.8365626085492527,0.8580227781482379,0.8312532309983496,0.8578536171890232,0.8307970890801801,0.8724062705976142,0.833780103943191
77
+ 1,35,0.870534919575303,0.8368078513987273,0.8555462990883941,0.8298727521730459,0.8558816235702567,0.8297725187912207,0.8670807851295435,0.8331248400042164
78
+ 1,40,0.8743167522238425,0.8373825568668951,0.85153489764898,0.8273937312650219,0.8516533514781536,0.8269216575676451,0.8686491840522521,0.8348439047787776
79
+ 1,45,0.8806085261868989,0.8397441719983171,0.8566799070842341,0.8307272635498225,0.8561274767520971,0.829884330228956,0.8751347514479277,0.8379066821759907
80
+ 1,50,0.8816526042206593,0.8418211489913339,0.8605027066978291,0.8334447764187308,0.8598530917579015,0.8327472740614357,0.8776614397907897,0.8399696980522541
81
+ 1,55,0.8841096510923123,0.8473968605016944,0.8661771665734255,0.8377510120010956,0.8658698006720298,0.8374641632040368,0.8817706024215975,0.8457493060045292
82
+ 1,60,0.8839210262917737,0.8477609402131523,0.865664127147857,0.8377527718689135,0.8654456651052623,0.8375626398363967,0.8816832456980934,0.8462228105553244
83
+ 1,65,0.8867021189724126,0.848545734951409,0.8647761380436072,0.8375436360538129,0.8642854064962776,0.8371455285722317,0.8835953647217317,0.8474595542288131
84
+ 1,70,0.8871646452706735,0.8489740815497406,0.8641159107901353,0.8374295097353547,0.8636349111353685,0.837027313123551,0.8837398766614113,0.8479764990449565
85
+ 1,75,0.8875288026641102,0.8493697553688487,0.8649769177387626,0.8379059537931618,0.8645645469750785,0.8374833962936133,0.8840295579817075,0.8482117542509239
86
+ 1,80,0.8882525612156734,0.8492534504766323,0.8656007489714997,0.8382947640995062,0.8651125440040066,0.837844250291943,0.8844747160697533,0.8480165812981342
87
+ 1,85,0.8882867437932778,0.8494221054657964,0.8674065226349011,0.8392554449144576,0.8670272793541641,0.8389523725257101,0.8848106546619801,0.8478395705996733
88
+ 0,5,0.8679167275613882,0.8392825000128351,0.8498070996922901,0.8276834433323101,0.8501350419905098,0.8278981412633035,0.8618572376059785,0.8363093181035706
89
+ 0,10,0.8887349860575673,0.8457365732252248,0.8668378770287561,0.8391718747400299,0.8665034066450201,0.8388289507397411,0.8839380563746794,0.8429278572121315
90
+ 0,15,0.8877812652620934,0.8451114849302439,0.8705956580319006,0.8407695876907216,0.8702270987637925,0.840517865577873,0.8826843452807063,0.8400937231100333
91
+ 0,20,0.8737874668193486,0.8338898236637449,0.8555650256005959,0.8303017789102958,0.8543004804812163,0.8296187985709725,0.869075253136159,0.8305428838747559
92
+ 0,25,0.8706556080617714,0.8436957612962414,0.8537919274797451,0.8335982564331069,0.8536057034560269,0.8338536353922711,0.8657839991744172,0.840859993886661
93
+ 0,30,0.8896787243370439,0.8477378206778938,0.8587141688619713,0.8432579997668312,0.8581164977945295,0.8425586615276752,0.8757170011239765,0.8441987898767589
94
+ 0,35,0.89525494002112,0.8584325826561146,0.872524613510381,0.8499964255492712,0.8723689021428405,0.8499400101940708,0.8873335309070487,0.8547200893098559
95
+ 0,40,0.8957014546707155,0.8577273240478729,0.8716214742295126,0.847803153331678,0.8712491518097182,0.8474002535359508,0.888405643915785,0.8552645694831528
96
+ 0,45,0.8968823726667944,0.8566623879577617,0.8761497618313175,0.8483591310707408,0.8756054395382993,0.8479716563367387,0.8909510226125918,0.8542162096752622
97
+ 0,50,0.8972960109104818,0.8585446446306403,0.8826977865138071,0.8507208318855497,0.8823737268761819,0.8508479405807438,0.8926297331394111,0.8552242163438644
98
+ 0,55,0.9000696992128396,0.8540894181057321,0.8763843899624942,0.8459110397635238,0.8759486229749184,0.8456759190161588,0.892770561319024,0.851927591696908
99
+ 0,60,0.9007582864051924,0.8577880579452332,0.8824071697128935,0.8491215479452517,0.8819420867802081,0.8491816424495486,0.8966296982456242,0.8560601773228338
100
+ 0,65,0.8987639593053431,0.8588119181816156,0.8832443305899011,0.8499478162147194,0.8827055621027946,0.8502528216871921,0.8959506749102887,0.856791844585429
101
+ 0,70,0.9042728560498481,0.8573242259027876,0.8785493208900547,0.8483806991663686,0.8781183756074898,0.8483667559742281,0.8973345316021303,0.855364836794866
102
+ 0,75,0.9070486847248657,0.8587458441357814,0.8815116234297685,0.851004843577978,0.881057115883677,0.8507261265273521,0.8997692420873971,0.8555787787681199
103
+ 0,80,0.9115238179249339,0.8644252002043745,0.8896702054311392,0.8569397544290467,0.8888038579786194,0.8565102580729689,0.9068754466500615,0.8624194789246489
104
+ 0,85,0.9133754565932012,0.8641030075213382,0.8914096757432584,0.8560692921538365,0.8904878388932101,0.8556011657282903,0.9085956487411135,0.8621939306899863
105
+ 0,90,0.9143895798626201,0.8607851755208265,0.8904487587541513,0.8526555635771504,0.8895795059678429,0.8522714973107895,0.908205500138063,0.8586874084659387
106
+ 0,95,0.9106443709931665,0.8621019028809177,0.8876162904879097,0.8498252097916447,0.8866744671704001,0.8494427165230843,0.9052526873103555,0.8602653872342196
107
+ 0,100,0.9147635248400543,0.8684139322800241,0.8959895893069166,0.8554440375928916,0.8948205403676301,0.8549174965320185,0.910967631720253,0.8665932602799038
108
+ 0,105,0.9198767816167137,0.8718631807630842,0.9036855792457472,0.8616058473157839,0.9025657997409149,0.8612093714969976,0.9169127802126982,0.8698503815145749
109
+ 0,110,0.9164753173286644,0.8655381452141317,0.892061323640989,0.8541821275286703,0.8912752576686778,0.8535486951975514,0.9106338310131812,0.8635375074347306
110
+ 0,115,0.9165948096585355,0.8613641391701127,0.8902048647227107,0.8528978964147899,0.8895252127720701,0.852112134986689,0.91000626564515,0.8591635377122074
111
+ 0,120,0.918716966347592,0.8679245872546862,0.8930241980070296,0.8557440424983549,0.8925271302144391,0.8552209291495979,0.9129720602651781,0.8658291471574356
112
+ 0,125,0.914880119854865,0.8624533061308517,0.8834574685059688,0.8509792400632793,0.882940004673272,0.8504166768129119,0.9080863248147976,0.8612032532800468
113
+ 0,130,0.9160443838180188,0.8711973861314996,0.8991323946396322,0.8601412937438344,0.898568579934373,0.8601108281946677,0.914861273941567,0.8693849864087044
114
+ 0,135,0.9170809925428598,0.8697309790810729,0.8935051261888771,0.8569401153982782,0.8933803341691966,0.8569744135100255,0.9153205659348177,0.8684433652731733
115
+ 0,140,0.9141252981914598,0.859820692859365,0.8802706366854371,0.8488588897464282,0.8805703252146797,0.8489805257549261,0.9092202686024786,0.8584659824331501
116
+ 0,145,0.9148985021848397,0.8641325115060207,0.8885684791664167,0.8532521440864276,0.8885241446121035,0.8529412474297874,0.9103850099918301,0.8618200743860479
117
+ 0,150,0.9129342677503718,0.863776698082065,0.8954219746952089,0.8558475189303277,0.8950151278931474,0.8552998841405038,0.9087990811823968,0.8599876208493757
118
+ 0,155,0.9160158432265336,0.8561401122033857,0.8897239799892765,0.8540527623062999,0.8894062890046964,0.8535243119869788,0.9097505677378808,0.8523238351146983
119
+ 0,160,0.9197276432644652,0.8644924113413055,0.8908282946986539,0.8565161984175605,0.8904069469426518,0.8560909249845661,0.9137922220477573,0.863508598191453
120
+ 0,165,0.9185687183178386,0.8712942834099664,0.9002272059763696,0.8611817993517821,0.8996064220908164,0.8609160921551977,0.9148204150272158,0.8689520013136893
121
+ 0,170,0.9235060245078897,0.8705604988444134,0.9025310509798454,0.8628864271622516,0.9020421484087672,0.8626646559336794,0.9195307879388939,0.8687608925224247
122
+ 0,175,0.9226073485401868,0.8652685188723492,0.8998523636626469,0.8612870061086231,0.899424361606084,0.8610948111952944,0.9180578046728773,0.863359003141773
123
+ 0,180,0.9228455774172549,0.8652889018058397,0.9043841975027908,0.8622466120470325,0.9039937689703322,0.8619958068890765,0.91872681652699,0.8627923260880558
124
+ 0,185,0.9230940602052514,0.8666844785065909,0.9052002275789652,0.8629059772345069,0.9047027013961971,0.8626807321290579,0.9200731158647114,0.8647273115891472
125
+ 0,190,0.9239248176533206,0.867729066091266,0.8995457762885699,0.86182780953815,0.8991235452848253,0.8615494352147858,0.9207741730883738,0.8666529760890349
126
+ 0,195,0.9243437738781334,0.8698253920632459,0.8957381407486663,0.8619006387254782,0.8955197438803053,0.8615827307308436,0.9199858591534493,0.8683255536247906
127
+ 0,200,0.9228711717364891,0.875957086599986,0.9065925607029983,0.8679808296057967,0.9062356240047938,0.8678474745468656,0.9202570087727889,0.8736896304919392
128
+ 0,205,0.9256178506156987,0.8765056868664182,0.9064409853359258,0.8674620279769545,0.9061050164241116,0.8674467283248407,0.9233125709279514,0.8749388030737174
129
+ 0,210,0.926131696891535,0.8727643807556409,0.903004676759118,0.863931255922924,0.9026744178191182,0.8636951125003077,0.9229846339757564,0.871469419634475
130
+ 0,215,0.9271164117272236,0.8691334437035733,0.9059067157776326,0.8637242939344609,0.9056007195604957,0.8636145822218215,0.9238988387567878,0.8673494967769807
131
+ 0,-1,0.9265460331541898,0.8622576597545611,0.9018965558347299,0.8603663551844797,0.9016958174784424,0.8601629210749469,0.9222067529221164,0.8598655713198073
132
+ 1,5,0.9254656214442081,0.8605863072872301,0.8923828648736679,0.857998988820601,0.8922376598672682,0.8575620358363978,0.9195819758083116,0.858600473651038
133
+ 1,10,0.929069053070145,0.8713670941403995,0.8985600431431076,0.8630266254696762,0.8982114935695477,0.862721695032697,0.9246528955996565,0.8701816008751154
134
+ 1,15,0.9302433575943745,0.8748567757557896,0.9008846769696788,0.8654026049869064,0.9004614269605862,0.8651642701860398,0.9265208942296635,0.8738848278198885
135
+ 1,20,0.9299572886714847,0.8752703217758145,0.905386417671615,0.8672254117110791,0.9049169689438201,0.8670304407120084,0.9275369272528282,0.8742129372234204
136
+ 1,25,0.9299564165526902,0.8744956247574696,0.9044191528153095,0.8659361146476848,0.9040827391737545,0.8657845610218105,0.9269837759931308,0.8731818102522059
137
+ 1,30,0.9281591601807296,0.8717135657689413,0.8963656564617817,0.8611981343914743,0.8962442110668112,0.8609052352017534,0.923549851762846,0.8702442816105035
138
+ 1,35,0.9253511365054845,0.8677064149814759,0.8887851756543078,0.8569305789870668,0.8887496146333741,0.856600410521699,0.9195770244610093,0.8663649915321736
139
+ 1,40,0.9288862058319851,0.8739079370487097,0.900263848295694,0.8619282982808296,0.9000356067302264,0.8616795712771445,0.9246790948947042,0.8724742956163792
140
+ 1,45,0.9305228137275507,0.8774699244154426,0.911137105738087,0.8674097738774581,0.9107594685432376,0.867268369038324,0.9275785350232544,0.8758890406392797
141
+ 1,50,0.9306589052956732,0.877745263987269,0.9134521655206479,0.8685059343920065,0.9130627553641569,0.8683998242562905,0.9277738283034128,0.8759380195317933
142
+ 1,55,0.9308737012063322,0.8769993545878347,0.9091558369851596,0.8660761428270826,0.9089368487971657,0.8660539723317633,0.9271627402824307,0.8753133092802537
143
+ 1,60,0.9305940451634,0.8756938144068405,0.9042411558841541,0.8633126104321371,0.9041910802214976,0.8632967725650517,0.9263091887891818,0.8742831951647007
144
+ 1,65,0.9318182183862852,0.875662298078104,0.9057046181376806,0.8639821644936754,0.9056522926665176,0.8640281622247729,0.9278405530116532,0.8742755175280157
145
+ 1,70,0.9322609277237011,0.8761229321455828,0.9095617454586733,0.8655479067825836,0.9093937447572409,0.8655981756358028,0.9290885886618334,0.8747077609645147
146
+ 1,75,0.9326729085262982,0.8761598939816789,0.9106894346230207,0.865914921313883,0.9104575282501486,0.86605425455842,0.9298956387615536,0.8748786001403602
147
+ 1,80,0.9334379085522135,0.875839106665688,0.9101722763444193,0.8655545695339736,0.9099386728961762,0.8656657576093104,0.9302664948475334,0.8745968215674056
148
+ 1,85,0.9339196310116991,0.8759066894469337,0.9101036790203432,0.8656483156411037,0.9098176213722734,0.8657299926214245,0.9301820665697051,0.8745561421616361
149
+ 1,90,0.9347219746472001,0.8781562807293729,0.9124281768469428,0.8671178535045345,0.9121285601232004,0.8672168483416913,0.9312459219547113,0.8767368066255382
150
+ 1,95,0.9344617731683444,0.8790657532957731,0.9140116520596789,0.8679233927486122,0.9136732733633979,0.8680033188191053,0.9313033784645203,0.8775642613372716
151
+ 1,100,0.9348496249958712,0.8789007340410983,0.9112691382060187,0.8669402230095061,0.9109699888890022,0.8670344117984118,0.9315497430739261,0.8776005660740649
152
+ 1,105,0.9343131899097149,0.8773357060783332,0.9041552998013316,0.8637678357266624,0.9040069065243176,0.8638807484173956,0.9299465038711654,0.8763169204314455
153
+ 1,110,0.9347308868009006,0.8776154923985485,0.9067690743974312,0.8645603404217432,0.9066015243055059,0.8646967316679132,0.9306924715662486,0.8765312978762249
154
+ 1,115,0.9350770893091163,0.8773550510186073,0.9081412967715663,0.8650762445418273,0.9079835762724408,0.8651942123754675,0.9311443395646427,0.8762480718734299
155
+ 1,120,0.935070268580507,0.8768586813104297,0.9083551831511404,0.8651316087150189,0.908253492797844,0.8652961950339508,0.931040369010893,0.8756085140271975
156
+ 1,125,0.935359972149762,0.8765991869942081,0.9095020409649315,0.8657471085068306,0.909387628639486,0.8659473853340092,0.9314201847904688,0.8752660859820512
157
+ 1,130,0.9356321484895368,0.8783315417485817,0.9135049750363251,0.8681006669192459,0.9132862695758747,0.8682811412663124,0.9321733862717965,0.8769005898922744
158
+ 1,135,0.935507849480823,0.878843463977912,0.9150938847517219,0.8689891338005301,0.9148242118966199,0.8691983558698116,0.9322687039489763,0.8773572476665785
159
+ 1,140,0.9355233278546127,0.8781532692056564,0.9137912556117298,0.868117392421835,0.9135522128910659,0.8682931842121006,0.9320564419989895,0.8766822773557056
160
+ 1,145,0.935329396039449,0.8761561137955566,0.9106855975151308,0.8660698860501255,0.9105166376707309,0.8662616709773764,0.93143079424981,0.87468276487385
161
+ 1,150,0.9352179003493456,0.8749538941414978,0.9086322707028698,0.8649522971300765,0.9084970906477277,0.8651951995162867,0.9309673409527521,0.8735708714654987
162
+ 1,155,0.9352948871366611,0.8756557395427904,0.9077251804066014,0.8650045857501144,0.9076089884106283,0.8651938404752518,0.93086361536513,0.8743683613769129
163
+ 1,160,0.9356742120290937,0.8769774792288081,0.9086718620573724,0.8658913125024083,0.9085339089397839,0.8660766653210421,0.9313483260492322,0.8757086141109465
164
+ 1,165,0.9360028061833924,0.8784024849977924,0.9101352782947449,0.8671118302524301,0.9099625359631539,0.8672898735617002,0.9319043445139447,0.8771487979139988
165
+ 1,170,0.9361690573762063,0.879076867466976,0.9110188358706905,0.8677509708370087,0.9108136154855121,0.8679058993797163,0.9322212458766288,0.8778504119373721
166
+ 1,175,0.9363441003942646,0.8794949943879766,0.9119228955720963,0.8683289346394075,0.9116738020476581,0.8684696806916403,0.9325891049486027,0.8782725266336875
167
+ 1,180,0.9364451823311084,0.8796451327351942,0.9127033019581462,0.8687193483179128,0.9124277146355259,0.8688513590894591,0.9328360476819475,0.8783965028889914
168
+ 1,185,0.9364494611836099,0.8796180744786679,0.9133325894125375,0.8688831141230446,0.9130358115224646,0.8689917155291346,0.932947228837829,0.878316988663163
169
+ 1,190,0.9364657368967252,0.8797000745542436,0.9139580467008505,0.8690657889369382,0.9136522133989453,0.869202729522986,0.9330355678351065,0.8783647469976339
170
+ 1,195,0.9365321491604678,0.879800420179723,0.914151461123262,0.8691452447595166,0.9138396964450423,0.8692694135193341,0.9330925901625859,0.8784603970261452
171
+ 1,200,0.9365803078628547,0.880047052747621,0.9146463025583372,0.869452959742166,0.9143201616484207,0.869576523978811,0.9331941026057009,0.8787124162431427
172
+ 1,205,0.9366061846324072,0.8800464153243707,0.914508745872416,0.869396656458201,0.9141864844853376,0.8695258973397896,0.9331744820031067,0.8787121907560129
173
+ 1,210,0.9366218645753523,0.8799199929377025,0.9140847383196605,0.869179342848713,0.9137762293994964,0.8693044404721185,0.9331006792355034,0.878580480769756
174
+ 1,215,0.9366227756036711,0.8798848059176724,0.9139959047891416,0.8691252278761648,0.9136896594077416,0.8692555297385269,0.93308834645303,0.8785530744473242
175
+ 1,-1,0.9366204487196361,0.8798841734945619,0.9140105654399991,0.8691202632081293,0.9137041432427127,0.8692540657563095,0.93308938819303,0.878548089008091
modules.json ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [
2
+ {
3
+ "idx": 0,
4
+ "name": "0",
5
+ "path": "",
6
+ "type": "sentence_transformers.models.Transformer"
7
+ },
8
+ {
9
+ "idx": 1,
10
+ "name": "1",
11
+ "path": "1_Pooling",
12
+ "type": "sentence_transformers.models.Pooling"
13
+ }
14
+ ]
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:672e482465ea3ae1884f0a8c0531b0c03eb34761ec22497869d669a87c956672
3
+ size 438022897
sentence_bert_config.json ADDED
@@ -0,0 +1,4 @@
 
 
 
 
1
+ {
2
+ "max_seq_length": 512,
3
+ "do_lower_case": false
4
+ }
special_tokens_map.json ADDED
@@ -0,0 +1 @@
 
1
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "[UNK]", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
tokenizer_config.json ADDED
@@ -0,0 +1 @@
 
1
+ {"do_lower_case": true, "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "[UNK]", "pad_token": "<pad>", "mask_token": "<mask>", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "C:\\Users\\Anwender\\Desktop\\rodion Github\\SGD-Recognizer\\src\\embedding\\..\\..\\saved_model\\sbert_trained_sdg_sim_score\\", "tokenizer_class": "MPNetTokenizer"}
vocab.txt ADDED
The diff for this file is too large to render. See raw diff