xiomarablanco committed
Commit ab827cb (1 parent: a4c6092)

modeloBert

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/1_Pooling/config.json +7 -0
  2. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/2_Dense/config.json +1 -0
  3. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/2_Dense/pytorch_model.bin +3 -0
  4. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/README.md +87 -0
  5. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/config.json +31 -0
  6. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/config_sentence_transformers.json +7 -0
  7. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/eval/similarity_evaluation_results.csv +0 -0
  8. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/modules.json +20 -0
  9. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/pytorch_model.bin +3 -0
  10. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/sentence_bert_config.json +4 -0
  11. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/special_tokens_map.json +1 -0
  12. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/tokenizer.json +0 -0
  13. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/tokenizer_config.json +1 -0
  14. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/vocab.txt +0 -0
  15. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/1_Pooling/config.json +7 -0
  16. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/2_Dense/config.json +1 -0
  17. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/2_Dense/pytorch_model.bin +3 -0
  18. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/README.md +87 -0
  19. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/config.json +31 -0
  20. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/config_sentence_transformers.json +7 -0
  21. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/eval/similarity_evaluation_results.csv +121 -0
  22. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/modules.json +20 -0
  23. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/pytorch_model.bin +3 -0
  24. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/sentence_bert_config.json +4 -0
  25. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/special_tokens_map.json +1 -0
  26. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/tokenizer.json +0 -0
  27. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/tokenizer_config.json +1 -0
  28. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/vocab.txt +0 -0
  29. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/1_Pooling/config.json +7 -0
  30. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/2_Dense/config.json +1 -0
  31. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/2_Dense/pytorch_model.bin +3 -0
  32. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/README.md +87 -0
  33. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/config.json +31 -0
  34. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/config_sentence_transformers.json +7 -0
  35. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/eval/similarity_evaluation_results.csv +13 -0
  36. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/modules.json +20 -0
  37. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/pytorch_model.bin +3 -0
  38. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/sentence_bert_config.json +4 -0
  39. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/special_tokens_map.json +1 -0
  40. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/tokenizer.json +0 -0
  41. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/tokenizer_config.json +1 -0
  42. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/vocab.txt +0 -0
  43. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/30_Epochs/1_Pooling/config.json +7 -0
  44. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/30_Epochs/2_Dense/config.json +1 -0
  45. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/30_Epochs/2_Dense/pytorch_model.bin +3 -0
  46. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/30_Epochs/README.md +87 -0
  47. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/30_Epochs/config.json +31 -0
  48. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/30_Epochs/config_sentence_transformers.json +7 -0
  49. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/30_Epochs/eval/similarity_evaluation_results.csv +361 -0
  50. codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/30_Epochs/modules.json +20 -0
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/1_Pooling/config.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "word_embedding_dimension": 768,
+ "pooling_mode_cls_token": false,
+ "pooling_mode_mean_tokens": true,
+ "pooling_mode_max_tokens": false,
+ "pooling_mode_mean_sqrt_len_tokens": false
+ }
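The pooling configuration above enables only mean-token pooling over the 768-dimensional BERT token embeddings. As a rough illustration of what that operation does (a minimal sketch in plain PyTorch, not code from this repository; shapes follow the config):

```python
import torch

def mean_pool(token_embeddings: torch.Tensor, attention_mask: torch.Tensor) -> torch.Tensor:
    """Average the token embeddings of each sentence, ignoring padding positions."""
    mask = attention_mask.unsqueeze(-1).type_as(token_embeddings)  # (batch, seq_len, 1)
    summed = (token_embeddings * mask).sum(dim=1)                  # sum over real tokens only
    counts = mask.sum(dim=1).clamp(min=1e-9)                       # number of real tokens
    return summed / counts                                         # (batch, 768)

# Illustrative shapes: 2 sentences, 12 tokens, 768-dim embeddings as in this config
pooled = mean_pool(torch.randn(2, 12, 768), torch.ones(2, 12, dtype=torch.long))
print(pooled.shape)  # torch.Size([2, 768])
```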
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/2_Dense/config.json ADDED
@@ -0,0 +1 @@
+ {"in_features": 768, "out_features": 256, "bias": true, "activation_function": "torch.nn.modules.activation.Tanh"}
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/2_Dense/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8978217764f1d61d58e7a58ff4abc8e4fb0abdbc9d3b3e0d59b4df66d52740d0
+ size 788519
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/README.md ADDED
@@ -0,0 +1,87 @@
+ ---
+ pipeline_tag: sentence-similarity
+ tags:
+ - sentence-transformers
+ - feature-extraction
+ - sentence-similarity
+ ---
+
+ # {MODEL_NAME}
+
+ This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 256 dimensional dense vector space and can be used for tasks like clustering or semantic search.
+
+ <!--- Describe your model here -->
+
+ ## Usage (Sentence-Transformers)
+
+ Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
+
+ ```
+ pip install -U sentence-transformers
+ ```
+
+ Then you can use the model like this:
+
+ ```python
+ from sentence_transformers import SentenceTransformer
+ sentences = ["This is an example sentence", "Each sentence is converted"]
+
+ model = SentenceTransformer('{MODEL_NAME}')
+ embeddings = model.encode(sentences)
+ print(embeddings)
+ ```
+
+
+
+ ## Evaluation Results
+
+ <!--- Describe how your model was evaluated -->
+
+ For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name={MODEL_NAME})
+
+
+ ## Training
+ The model was trained with the parameters:
+
+ **DataLoader**:
+
+ `torch.utils.data.dataloader.DataLoader` of length 11 with parameters:
+ ```
+ {'batch_size': 15, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
+ ```
+
+ **Loss**:
+
+ `sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
+
+ Parameters of the fit()-Method:
+ ```
+ {
+ "epochs": 100,
+ "evaluation_steps": 1,
+ "evaluator": "sentence_transformers.evaluation.EmbeddingSimilarityEvaluator.EmbeddingSimilarityEvaluator",
+ "max_grad_norm": 1,
+ "optimizer_class": "<class 'transformers.optimization.AdamW'>",
+ "optimizer_params": {
+ "lr": 2e-05
+ },
+ "scheduler": "WarmupLinear",
+ "steps_per_epoch": null,
+ "warmup_steps": 110,
+ "weight_decay": 0.01
+ }
+ ```
+
+
+ ## Full Model Architecture
+ ```
+ SentenceTransformer(
+ (0): Transformer({'max_seq_length': 256, 'do_lower_case': False}) with Transformer model: BertModel
+ (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
+ (2): Dense({'in_features': 768, 'out_features': 256, 'bias': True, 'activation_function': 'torch.nn.modules.activation.Tanh'})
+ )
+ ```
+
+ ## Citing & Authors
+
+ <!--- Describe where people can find more information -->
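As a hedged sketch only: training with the fit() parameters listed in this README (CosineSimilarityLoss, batch size 15, 100 epochs, 110 warmup steps, lr 2e-05, an EmbeddingSimilarityEvaluator run every step) could look roughly like this with sentence-transformers 2.2.0. The training pairs, scores and output path are placeholders, not data from this commit, and the committed model additionally appends the 2_Dense module shown further below.

```python
from torch.utils.data import DataLoader
from sentence_transformers import SentenceTransformer, InputExample, losses
from sentence_transformers.evaluation import EmbeddingSimilarityEvaluator

# Placeholder pairs: (text_a, text_b, similarity label in [0, 1])
train_examples = [
    InputExample(texts=["respuesta del estudiante", "respuesta de referencia"], label=0.8),
    InputExample(texts=["otra respuesta", "respuesta de referencia"], label=0.3),
]
train_dataloader = DataLoader(train_examples, shuffle=True, batch_size=15)

# Simplified: the committed checkpoint also adds a Dense 768 -> 256 module (see modules.json)
model = SentenceTransformer("bert-base-multilingual-uncased")
train_loss = losses.CosineSimilarityLoss(model)
evaluator = EmbeddingSimilarityEvaluator.from_input_examples(train_examples)

model.fit(
    train_objectives=[(train_dataloader, train_loss)],
    evaluator=evaluator,
    epochs=100,
    evaluation_steps=1,
    warmup_steps=110,
    optimizer_params={"lr": 2e-05},
    weight_decay=0.01,
    output_path="Model_bert-base-multilingual-uncased/100_Epochs",  # placeholder
)
```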
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/config.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "_name_or_path": "bert-base-multilingual-uncased",
+ "architectures": [
+ "BertModel"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "classifier_dropout": null,
+ "directionality": "bidi",
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-12,
+ "max_position_embeddings": 512,
+ "model_type": "bert",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "pad_token_id": 0,
+ "pooler_fc_size": 768,
+ "pooler_num_attention_heads": 12,
+ "pooler_num_fc_layers": 3,
+ "pooler_size_per_head": 128,
+ "pooler_type": "first_token_transform",
+ "position_embedding_type": "absolute",
+ "torch_dtype": "float32",
+ "transformers_version": "4.18.0",
+ "type_vocab_size": 2,
+ "use_cache": true,
+ "vocab_size": 105879
+ }
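The config above is the stock bert-base-multilingual-uncased architecture (12 layers, hidden size 768, vocab 105879) saved with transformers 4.18.0. A quick, hedged way to confirm that from a local copy of this folder (the path is a placeholder):

```python
from transformers import AutoConfig

# Placeholder local path to a checkout of this folder
cfg = AutoConfig.from_pretrained("Model_bert-base-multilingual-uncased/100_Epochs")
print(cfg.model_type, cfg.num_hidden_layers, cfg.hidden_size, cfg.vocab_size)  # bert 12 768 105879
```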
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/config_sentence_transformers.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "__version__": {
+ "sentence_transformers": "2.2.0",
+ "transformers": "4.18.0",
+ "pytorch": "1.11.0+cpu"
+ }
+ }
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/eval/similarity_evaluation_results.csv ADDED
The diff for this file is too large to render. See raw diff
 
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/modules.json ADDED
@@ -0,0 +1,20 @@
+ [
+ {
+ "idx": 0,
+ "name": "0",
+ "path": "",
+ "type": "sentence_transformers.models.Transformer"
+ },
+ {
+ "idx": 1,
+ "name": "1",
+ "path": "1_Pooling",
+ "type": "sentence_transformers.models.Pooling"
+ },
+ {
+ "idx": 2,
+ "name": "2",
+ "path": "2_Dense",
+ "type": "sentence_transformers.models.Dense"
+ }
+ ]
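modules.json chains three sentence-transformers modules: a Transformer encoder at the folder root, the mean Pooling in 1_Pooling, and the Dense projection in 2_Dense. A minimal sketch of building the same stack programmatically (dimensions and checkpoint name taken from the config files here; everything else is illustrative):

```python
from torch import nn
from sentence_transformers import SentenceTransformer, models

# (0) BERT encoder, truncating at 256 tokens as in sentence_bert_config.json
word_embedding_model = models.Transformer("bert-base-multilingual-uncased", max_seq_length=256)

# (1) Mean pooling over token embeddings, as configured in 1_Pooling/config.json
pooling_model = models.Pooling(
    word_embedding_model.get_word_embedding_dimension(),  # 768
    pooling_mode_mean_tokens=True,
)

# (2) Dense projection 768 -> 256 with Tanh, as configured in 2_Dense/config.json
dense_model = models.Dense(in_features=768, out_features=256, activation_function=nn.Tanh())

model = SentenceTransformer(modules=[word_embedding_model, pooling_model, dense_model])
print(model)  # should mirror the "Full Model Architecture" section of the README above
```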
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86fbc023c2bd9c6f9648e4e6ae0ade9495689dd6cafe4b03bb19eb77257838ed
+ size 669492273
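pytorch_model.bin is committed as a Git LFS pointer (the three lines above), not the ~669 MB weight file itself. A small, hedged helper for reading such a pointer file while the real weights have not yet been pulled (the path is a placeholder):

```python
def read_lfs_pointer(path: str) -> dict:
    """Parse a Git LFS pointer file with lines of the form 'key value'."""
    fields = {}
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

ptr = read_lfs_pointer("100_Epochs/pytorch_model.bin")  # placeholder relative path
print(ptr["oid"], int(ptr["size"]))  # sha256:86fbc0..., 669492273
```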
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/sentence_bert_config.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "max_seq_length": 256,
+ "do_lower_case": false
+ }
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "bert-base-multilingual-uncased", "tokenizer_class": "BertTokenizer"}
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/100_Epochs/vocab.txt ADDED
The diff for this file is too large to render. See raw diff
 
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/1_Pooling/config.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "word_embedding_dimension": 768,
+ "pooling_mode_cls_token": false,
+ "pooling_mode_mean_tokens": true,
+ "pooling_mode_max_tokens": false,
+ "pooling_mode_mean_sqrt_len_tokens": false
+ }
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/2_Dense/config.json ADDED
@@ -0,0 +1 @@
+ {"in_features": 768, "out_features": 256, "bias": true, "activation_function": "torch.nn.modules.activation.Tanh"}
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/2_Dense/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b3ea3fe59a73cbfb81e01d9e393f86175cd0e83c912495afa5c523fe47bb913
+ size 788519
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/README.md ADDED
@@ -0,0 +1,87 @@
+ ---
+ pipeline_tag: sentence-similarity
+ tags:
+ - sentence-transformers
+ - feature-extraction
+ - sentence-similarity
+ ---
+
+ # {MODEL_NAME}
+
+ This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 256 dimensional dense vector space and can be used for tasks like clustering or semantic search.
+
+ <!--- Describe your model here -->
+
+ ## Usage (Sentence-Transformers)
+
+ Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
+
+ ```
+ pip install -U sentence-transformers
+ ```
+
+ Then you can use the model like this:
+
+ ```python
+ from sentence_transformers import SentenceTransformer
+ sentences = ["This is an example sentence", "Each sentence is converted"]
+
+ model = SentenceTransformer('{MODEL_NAME}')
+ embeddings = model.encode(sentences)
+ print(embeddings)
+ ```
+
+
+
+ ## Evaluation Results
+
+ <!--- Describe how your model was evaluated -->
+
+ For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name={MODEL_NAME})
+
+
+ ## Training
+ The model was trained with the parameters:
+
+ **DataLoader**:
+
+ `torch.utils.data.dataloader.DataLoader` of length 11 with parameters:
+ ```
+ {'batch_size': 15, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
+ ```
+
+ **Loss**:
+
+ `sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
+
+ Parameters of the fit()-Method:
+ ```
+ {
+ "epochs": 10,
+ "evaluation_steps": 1,
+ "evaluator": "sentence_transformers.evaluation.EmbeddingSimilarityEvaluator.EmbeddingSimilarityEvaluator",
+ "max_grad_norm": 1,
+ "optimizer_class": "<class 'transformers.optimization.AdamW'>",
+ "optimizer_params": {
+ "lr": 2e-05
+ },
+ "scheduler": "WarmupLinear",
+ "steps_per_epoch": null,
+ "warmup_steps": 11,
+ "weight_decay": 0.01
+ }
+ ```
+
+
+ ## Full Model Architecture
+ ```
+ SentenceTransformer(
+ (0): Transformer({'max_seq_length': 256, 'do_lower_case': False}) with Transformer model: BertModel
+ (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
+ (2): Dense({'in_features': 768, 'out_features': 256, 'bias': True, 'activation_function': 'torch.nn.modules.activation.Tanh'})
+ )
+ ```
+
+ ## Citing & Authors
+
+ <!--- Describe where people can find more information -->
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/config.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "_name_or_path": "bert-base-multilingual-uncased",
+ "architectures": [
+ "BertModel"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "classifier_dropout": null,
+ "directionality": "bidi",
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-12,
+ "max_position_embeddings": 512,
+ "model_type": "bert",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "pad_token_id": 0,
+ "pooler_fc_size": 768,
+ "pooler_num_attention_heads": 12,
+ "pooler_num_fc_layers": 3,
+ "pooler_size_per_head": 128,
+ "pooler_type": "first_token_transform",
+ "position_embedding_type": "absolute",
+ "torch_dtype": "float32",
+ "transformers_version": "4.18.0",
+ "type_vocab_size": 2,
+ "use_cache": true,
+ "vocab_size": 105879
+ }
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/config_sentence_transformers.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "__version__": {
+ "sentence_transformers": "2.2.0",
+ "transformers": "4.18.0",
+ "pytorch": "1.11.0+cpu"
+ }
+ }
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/eval/similarity_evaluation_results.csv ADDED
@@ -0,0 +1,121 @@
1
+ epoch,steps,cosine_pearson,cosine_spearman,euclidean_pearson,euclidean_spearman,manhattan_pearson,manhattan_spearman,dot_pearson,dot_spearman
2
+ 0,1,0.5510837085958704,0.3968021576053897,0.4866768806536414,0.3240961722056444,0.46696138550296984,0.30930851415823857,0.42881692105546887,0.39556985276810597
3
+ 0,2,0.5514234919862097,0.4041959866290927,0.4876711246573946,0.3240961722056444,0.4667126738906574,0.30930851415823857,0.4308571170301959,0.39556985276810597
4
+ 0,3,0.5523502149862911,0.4041959866290927,0.4901627998904127,0.3240961722056444,0.46748711236784196,0.30930851415823857,0.437411686849181,0.3931052430935383
5
+ 0,4,0.5560900269257757,0.42514516886291764,0.4964607449288063,0.3240961722056444,0.47119136056012345,0.30930851415823857,0.4465660599307273,0.388176023744403
6
+ 0,5,0.5586317493761919,0.43007438821205285,0.5023428562356852,0.31177312383280625,0.4744427525663887,0.3265607818802121,0.4582389776468036,0.37954988988341626
7
+ 0,6,0.5611074041501576,0.43007438821205285,0.5085979608010087,0.35120687862588845,0.47724634861066534,0.3265607818802121,0.47512205864079254,0.39803446244267354
8
+ 0,7,0.570357051301394,0.4682758381678513,0.5257467738774403,0.41035751081551175,0.489733188092417,0.3857114140698354,0.4941240906758455,0.43007438821205285
9
+ 0,8,0.5842355007491221,0.4781342768661218,0.549187168081703,0.4313066930493367,0.510762657128796,0.43377130272390435,0.5152297049186645,0.42514516886291764
10
+ 0,9,0.6073401773331741,0.4818311913779733,0.5832143935848383,0.4978511542626629,0.545082866723011,0.4460943510967425,0.529722212934852,0.4288420833747691
11
+ 0,10,0.6256090740635896,0.4473266559340263,0.6305950821798517,0.5089418977982173,0.6033215972239367,0.5077095929609334,0.49431113435447843,0.39803446244267354
12
+ 0,11,0.539995392812748,0.43377130272390435,0.5429445890188159,0.5286587751947585,0.531946001393508,0.5089418977982173,0.43636827104684134,0.38078219472070013
13
+ 0,-1,0.539995392812748,0.43377130272390435,0.5429445890188159,0.5286587751947585,0.531946001393508,0.5089418977982173,0.43636827104684134,0.38078219472070013
14
+ 1,1,0.5697946690569602,0.47197275267970273,0.5898082150881057,0.5348202993811775,0.5775814051616905,0.5606987009641377,0.3932684708933039,0.33765152541576643
15
+ 1,2,0.5949577813125145,0.565627920313273,0.5703982473672013,0.531123384869326,0.5643213225904933,0.5409818235675966,0.25823710459069427,0.24522866261948
16
+ 1,3,0.597411034247368,0.5188003364964878,0.577301022392694,0.5496079574285834,0.5756073224587656,0.552072567103151,0.0415743225012407,-0.057918327352339496
17
+ 1,4,0.6027952955197176,0.538517213893029,0.5863354850460368,0.5360526042184613,0.5884109874855517,0.571789444499692,0.02991228953048819,-0.08749364344715116
18
+ 1,5,0.6137912559488308,0.5643956154759892,0.5987236173523529,0.5865771025470979,0.6027496468494887,0.5828801880352464,0.06667874976534509,-0.0714736805624615
19
+ 1,6,0.6227477099414361,0.579183273523395,0.6074172375211828,0.538517213893029,0.6120715381223909,0.5619310058014215,0.1224390022763863,0.03573684028123075
20
+ 1,7,0.6284556792076046,0.5952032364080846,0.6126661536676583,0.5138711171473526,0.6178866739685748,0.5767186638488274,0.24225689351245971,0.1934718594535596
21
+ 1,8,0.6314402452870561,0.5952032364080846,0.6179498090857204,0.5335879945438936,0.6239620423007652,0.5767186638488274,0.3853711798981112,0.33149000122934735
22
+ 1,9,0.6310121444950654,0.5952032364080846,0.6188264929729858,0.5643956154759892,0.6242522723243449,0.5606987009641377,0.532250795337432,0.4818311913779733
23
+ 1,10,0.6326114690775445,0.592738626733517,0.6212141432949196,0.5643956154759892,0.6269472606584178,0.5606987009641377,0.5761504627598523,0.5754863590115435
24
+ 1,11,0.6253124709048146,0.579183273523395,0.6160077486645957,0.5619310058014215,0.6204000032271622,0.5606987009641377,0.586113399033033,0.5693248348251244
25
+ 1,-1,0.6253124709048146,0.579183273523395,0.6160077486645957,0.5619310058014215,0.6204000032271622,0.5606987009641377,0.586113399033033,0.5693248348251244
26
+ 2,1,0.5998093057862667,0.5483756525912995,0.6030977594070143,0.5865771025470979,0.6022666983515267,0.5853447977098141,0.5253636652642629,0.4670435333305674
27
+ 2,2,0.5552654108791196,0.48552810588982476,0.578772531025639,0.5853447977098141,0.5734943267609146,0.5335879945438936,0.4266595132881985,0.33765152541576643
28
+ 2,3,0.512970450248535,0.4263774737002014,0.5473152873049926,0.4929219349135277,0.5370196638515226,0.4879927155643924,0.3442433159809186,0.26864245452787255
29
+ 2,4,0.48535064579469067,0.43500360756118817,0.5185644260596649,0.4707404478424189,0.5071139763123275,0.4313066930493367,0.28104744908981477,0.16512884819603177
30
+ 2,5,0.4879123604026313,0.4202159495137823,0.5109842900952186,0.45595278979501314,0.49893588401846795,0.4288420833747691,0.24745866087065427,0.19100724977899197
31
+ 2,6,0.48630858431310986,0.4202159495137823,0.5056838217634425,0.45595278979501314,0.4925339400221209,0.4288420833747691,0.21573606770117898,0.16512884819603177
32
+ 2,7,0.4863495780433798,0.39556985276810597,0.4964814853641809,0.4263774737002014,0.48057344387430184,0.41898364467649857,0.1703588908426789,0.10474591116912464
33
+ 2,8,0.5388726608025954,0.4707404478424189,0.5452510216369596,0.46581122849328366,0.5346172207810076,0.4374682172357558,0.16383604353933445,0.07517059507431297
34
+ 2,9,0.5552083049652459,0.49045732523896,0.5545944526841096,0.49538654458809533,0.5430879035829175,0.4793665817034057,0.1591295586788838,0.054221412840488044
35
+ 2,10,0.5601494808821526,0.5151034219846364,0.5603574022437795,0.5151034219846364,0.5461070974693037,0.5126388123100688,0.14345451279944904,0.08872594828443497
36
+ 2,11,0.6041062164912168,0.6186170283164771,0.6195860213357737,0.6395662105503022,0.6066503101514819,0.6371016008757345,0.10249439621208631,0.0714736805624615
37
+ 2,-1,0.6041062164912168,0.6186170283164771,0.6195860213357737,0.6395662105503022,0.6066503101514819,0.6371016008757345,0.10249439621208631,0.0714736805624615
38
+ 3,1,0.632947560609662,0.6321723815265992,0.6465892484913411,0.619849333153761,0.6356210899499349,0.6284754670147478,0.08866845129378909,0.03943375479308221
39
+ 3,2,0.6862694745776611,0.6900907088789388,0.7021820684855775,0.6876260992043711,0.6958673484359519,0.7011814524144931,0.05025590845555464,0.011090743535554372
40
+ 3,3,0.6151359435695662,0.6321723815265992,0.5572203223854006,0.5052449832863658,0.5656562206975861,0.4818311913779733,-0.003293083714116768,-0.15896732400961266
41
+ 3,4,0.5783450170705552,0.5409818235675966,0.5290750786596177,0.46950814300513505,0.5320865880462885,0.45225587528316163,0.04135357592380551,-0.025878401582960203
42
+ 3,5,0.5904045708397223,0.5619310058014215,0.5501429765894301,0.43500360756118817,0.5540737600276846,0.48059888654068944,0.09906062422268205,-0.006161524186419096
43
+ 3,6,0.5796923918753482,0.5372849090557451,0.5234358373770523,0.49045732523896,0.5295242361408734,0.47566966719155424,0.10332929239105448,0.009858438698270553
44
+ 3,7,0.5717918316602357,0.5508402622658671,0.5291209590820241,0.49538654458809533,0.5273724664995149,0.48306349621525707,0.3178498660086813,0.33765152541576643
45
+ 3,8,0.5930254955230083,0.5742540541742597,0.624834649140961,0.6050616751063552,0.6250932307216546,0.6371016008757345,0.3164129514807906,0.2908239415989813
46
+ 3,9,0.5992082211725787,0.613687808967342,0.6509677084133334,0.6494246492485727,0.6506743523994021,0.6297077718520316,0.3137090711029066,0.306843904483671
47
+ 3,10,0.46842354838223405,0.4781342768661218,0.4820490858897944,0.46457892365599984,0.4747366632709158,0.44116513174760724,0.4420847547749069,0.4534881801204454
48
+ 3,11,0.46856869376340327,0.45225587528316163,0.4782847369970975,0.46457892365599984,0.4736507848352967,0.4362359123984719,0.4479415966105594,0.46950814300513505
49
+ 3,-1,0.46856869376340327,0.45225587528316163,0.4782847369970975,0.46457892365599984,0.4736507848352967,0.4362359123984719,0.4479415966105594,0.46950814300513505
50
+ 4,1,0.5017184348369577,0.4584173994695807,0.5047490273771291,0.46581122849328366,0.5006033380854505,0.4263774737002014,0.4820192258429915,0.5101742026355011
51
+ 4,2,0.5450365089487179,0.4793665817034057,0.5377444406276013,0.46334661881871597,0.5318618284663998,0.4596497043068645,0.528778529234613,0.4596497043068645
52
+ 4,3,0.5674253293978494,0.5064772881236497,0.554368994449296,0.544678738079448,0.5472338490490051,0.5224972510083393,0.548382525464233,0.5101742026355011
53
+ 4,4,0.5744334540656505,0.55823409128957,0.5588211071402137,0.5570017864522863,0.5500828148243082,0.6050616751063552,0.5402223132513422,0.5730217493369759
54
+ 4,5,0.5642520518612463,0.565627920313273,0.5504643489393084,0.5705571396624082,0.5416519865080227,0.6087585896182066,0.5085694449392926,0.5631633106387054
55
+ 4,6,0.5650089363914159,0.5853447977098141,0.551017763441745,0.5705571396624082,0.5421671352933951,0.6075262847809227,0.5126633613350388,0.5631633106387054
56
+ 4,7,0.5618688870279428,0.5853447977098141,0.5483355075616734,0.5508402622658671,0.5397014771073516,0.6075262847809227,0.5091466660005899,0.5631633106387054
57
+ 4,8,0.5573618568941934,0.6075262847809227,0.5441034626595582,0.5459110429167319,0.5355104488845672,0.6235462476656125,0.5082039243905155,0.5471433477540156
58
+ 4,9,0.5537992167197551,0.6025970654317876,0.5405529637791663,0.5816478831979626,0.5317099056028649,0.6235462476656125,0.5057382007772027,0.5508402622658671
59
+ 4,10,0.5552958393016998,0.6025970654317876,0.54056960930236,0.5816478831979626,0.5316349853235263,0.627243162177464,0.5152503140862389,0.5508402622658671
60
+ 4,11,0.5561013154127618,0.6075262847809227,0.539933137919768,0.5816478831979626,0.530958550128973,0.646960039574005,0.5224122415611594,0.5508402622658671
61
+ 4,-1,0.5561013154127618,0.6075262847809227,0.539933137919768,0.5816478831979626,0.530958550128973,0.646960039574005,0.5224122415611594,0.5508402622658671
62
+ 5,1,0.5627684953227874,0.6112231992927742,0.5440382518355383,0.6112231992927742,0.5348055206376432,0.6383339057130183,0.5398777729117984,0.5853447977098141
63
+ 5,2,0.566392475904452,0.6112231992927742,0.5458443041199939,0.6112231992927742,0.5362566803641474,0.6383339057130183,0.551414365892373,0.6050616751063552
64
+ 5,3,0.5711849190360596,0.5952032364080846,0.5492224456506596,0.6112231992927742,0.5392103959338517,0.633404686363883,0.5657199362352512,0.6321723815265992
65
+ 5,4,0.5665273737748436,0.6112231992927742,0.5444090051606381,0.6112231992927742,0.5338321596292822,0.6716061363196814,0.5638592255838545,0.627243162177464
66
+ 5,5,0.5587325723321432,0.6112231992927742,0.536801867057136,0.619849333153761,0.525864071910075,0.6716061363196814,0.5589685159257272,0.6050616751063552
67
+ 5,6,0.5588660825407739,0.6112231992927742,0.5367015851776478,0.619849333153761,0.5252166423356179,0.6321723815265992,0.5664047038653385,0.613687808967342
68
+ 5,7,0.5594962879301095,0.6112231992927742,0.5376917147751932,0.606293979943639,0.5251485399964694,0.6321723815265992,0.5736522776161022,0.6235462476656125
69
+ 5,8,0.5677007615641713,0.5939709315708008,0.5471066515490044,0.6112231992927742,0.5337823948911702,0.6432631250621536,0.5828613377838122,0.5902740170589494
70
+ 5,9,0.5712085511733362,0.627243162177464,0.551538696180853,0.6112231992927742,0.5376598235082796,0.6235462476656125,0.5848433860545443,0.565627920313273
71
+ 5,10,0.5795120942929327,0.5989001509199361,0.5627194157321366,0.6112231992927742,0.5488866615217671,0.627243162177464,0.5862671505045381,0.5483756525912995
72
+ 5,11,0.5773774275773207,0.5989001509199361,0.56309871934753,0.6112231992927742,0.5497730902980366,0.627243162177464,0.5797328270428812,0.5434464332421642
73
+ 5,-1,0.5773774275773207,0.5989001509199361,0.56309871934753,0.6112231992927742,0.5497730902980366,0.627243162177464,0.5797328270428812,0.5434464332421642
74
+ 6,1,0.5724529970345684,0.60013245575722,0.5610981085675698,0.5643956154759892,0.549113834887397,0.6149201138046257,0.5738061802076435,0.5434464332421642
75
+ 6,2,0.5688961516759937,0.60013245575722,0.5599344384915266,0.5754863590115435,0.5491576170091402,0.6087585896182066,0.5690310109191177,0.5237295558456231
76
+ 6,3,0.5669692713593304,0.5853447977098141,0.5610553176314215,0.5594663961268539,0.5519413633808866,0.5496079574285834,0.5654499856653671,0.4929219349135277
77
+ 6,4,0.5660474948480647,0.5853447977098141,0.5622949187524386,0.5545371767777186,0.5541649547036109,0.565627920313273,0.5633861995437216,0.4929219349135277
78
+ 6,5,0.5666119221186319,0.5853447977098141,0.564420468150526,0.5496079574285834,0.5573844884862335,0.5409818235675966,0.5630135906624258,0.4929219349135277
79
+ 6,6,0.5672076060967504,0.5890417122216656,0.5662776185741675,0.5496079574285834,0.5603169703018425,0.5545371767777186,0.5631436736488782,0.46334661881871597
80
+ 6,7,0.5650060512877118,0.5730217493369759,0.5672060643762817,0.5804155783606788,0.5630902743632267,0.5545371767777186,0.558445627251506,0.47443736235427036
81
+ 6,8,0.5580309136899538,0.5816478831979626,0.5651659988575997,0.5890417122216656,0.5625736484742657,0.5853447977098141,0.5462566486384084,0.49168963007624383
82
+ 6,9,0.5531941658995305,0.5816478831979626,0.5625465864479664,0.5730217493369759,0.5606666833237387,0.5890417122216656,0.5396043175221636,0.49168963007624383
83
+ 6,10,0.5529707639762464,0.5902740170589494,0.56228000797703,0.5890417122216656,0.560306933858571,0.5853447977098141,0.5402543655392967,0.49168963007624383
84
+ 6,11,0.5477087574064676,0.5680925299878407,0.5582742174633427,0.5890417122216656,0.5565010379186348,0.5853447977098141,0.5356287030143897,0.49168963007624383
85
+ 6,-1,0.5477087574064676,0.5680925299878407,0.5582742174633427,0.5890417122216656,0.5565010379186348,0.5853447977098141,0.5356287030143897,0.49168963007624383
86
+ 7,1,0.5398859293856905,0.579183273523395,0.5533367324317767,0.5730217493369759,0.5515376577848767,0.5890417122216656,0.5262569892456791,0.5064772881236497
87
+ 7,2,0.5356500416898937,0.5570017864522863,0.5505551248852882,0.5730217493369759,0.5480517778865585,0.5890417122216656,0.5219033217578573,0.4966188494253791
88
+ 7,3,0.5340962343090312,0.5680925299878407,0.549496930446022,0.5964355412453685,0.5461496629468282,0.5890417122216656,0.5210681217097879,0.4966188494253791
89
+ 7,4,0.5334134887186839,0.5680925299878407,0.5487770163197523,0.5964355412453685,0.544497311931601,0.5853447977098141,0.5206810574879331,0.4966188494253791
90
+ 7,5,0.5346609700256021,0.5890417122216656,0.549066604867183,0.5705571396624082,0.5435989876105213,0.5631633106387054,0.5220502072174203,0.4966188494253791
91
+ 7,6,0.5314321374760812,0.5890417122216656,0.5464248390221759,0.5705571396624082,0.5402422477550086,0.55823409128957,0.5189295562902643,0.4966188494253791
92
+ 7,7,0.5290441317021757,0.5890417122216656,0.5437555755250387,0.5705571396624082,0.5369658128113507,0.55823409128957,0.5167280772910248,0.4966188494253791
93
+ 7,8,0.5263369922472247,0.5804155783606788,0.5404174842894404,0.5705571396624082,0.5332166041269493,0.55823409128957,0.5152732314560662,0.46950814300513505
94
+ 7,9,0.5223588960731601,0.5804155783606788,0.5364871194729721,0.5705571396624082,0.5289907447066081,0.55823409128957,0.5147453570107013,0.4682758381678513
95
+ 7,10,0.5185508121701942,0.592738626733517,0.5328033399485353,0.5705571396624082,0.5253029522226992,0.544678738079448,0.5145802934776373,0.4879927155643924
96
+ 7,11,0.5153514554083487,0.592738626733517,0.5298198907305951,0.5705571396624082,0.5222850026273846,0.544678738079448,0.5122059045141544,0.4879927155643924
97
+ 7,-1,0.5153514554083487,0.592738626733517,0.5298198907305951,0.5705571396624082,0.5222850026273846,0.544678738079448,0.5122059045141544,0.4879927155643924
98
+ 8,1,0.5139663216208921,0.592738626733517,0.5287273765178674,0.5705571396624082,0.5213106719221071,0.5533048719404349,0.5110755535486861,0.4879927155643924
99
+ 8,2,0.5127249342094368,0.592738626733517,0.5277481456530929,0.5705571396624082,0.520496001662684,0.5533048719404349,0.5109556426985764,0.4879927155643924
100
+ 8,3,0.5122664058219184,0.592738626733517,0.5273551020489614,0.5705571396624082,0.5201061478412953,0.544678738079448,0.5110816731845834,0.4879927155643924
101
+ 8,4,0.511125118412488,0.592738626733517,0.5270077080343187,0.5853447977098141,0.5197999129262019,0.565627920313273,0.5094321553149344,0.4879927155643924
102
+ 8,5,0.5101377774648597,0.592738626733517,0.5262543533004428,0.5853447977098141,0.5192033430878605,0.565627920313273,0.5084023071017213,0.48059888654068944
103
+ 8,6,0.5088623522806643,0.592738626733517,0.5255275411056672,0.5853447977098141,0.518704775966622,0.565627920313273,0.5072654501960409,0.48059888654068944
104
+ 8,7,0.5078847560790773,0.6075262847809227,0.5250677141154176,0.5853447977098141,0.5184800197214552,0.565627920313273,0.506528284382887,0.4978511542626629
105
+ 8,8,0.5065560133871756,0.6075262847809227,0.5247646546987356,0.5853447977098141,0.5182624122608466,0.565627920313273,0.5050935773855989,0.4978511542626629
106
+ 8,9,0.5047023708420542,0.6075262847809227,0.524227381832177,0.5754863590115435,0.5178729608106187,0.565627920313273,0.5033616878410366,0.48059888654068944
107
+ 8,10,0.5033842675166709,0.6075262847809227,0.524396329413174,0.5754863590115435,0.5183129356877965,0.5705571396624082,0.501778133719041,0.4534881801204454
108
+ 8,11,0.50194453163122,0.6013647605945037,0.5242905236756141,0.5754863590115435,0.5186743164155508,0.5754863590115435,0.5001258281093071,0.47566966719155424
109
+ 8,-1,0.50194453163122,0.6013647605945037,0.5242905236756141,0.5754863590115435,0.5186743164155508,0.5754863590115435,0.5001258281093071,0.47566966719155424
110
+ 9,1,0.5006963650369957,0.6013647605945037,0.5239630591609693,0.5976678460826522,0.5185161209564879,0.5754863590115435,0.4986418491475127,0.46457892365599984
111
+ 9,2,0.5002683078632423,0.6013647605945037,0.5237447522270224,0.5976678460826522,0.5182842446933333,0.5754863590115435,0.4982948870937328,0.44486204625945874
112
+ 9,3,0.5003772719602404,0.6013647605945037,0.5233987937931777,0.5976678460826522,0.5177945528310127,0.5754863590115435,0.49880864352560256,0.47566966719155424
113
+ 9,4,0.4999926194599982,0.5976678460826522,0.5227096826908241,0.5754863590115435,0.5170549410875386,0.5754863590115435,0.4986814199349069,0.4707404478424189
114
+ 9,5,0.49936795066864637,0.5976678460826522,0.5219874136966964,0.5754863590115435,0.5163617711485672,0.5754863590115435,0.49811139492262746,0.4929219349135277
115
+ 9,6,0.4983295014766385,0.5976678460826522,0.5211236271641506,0.5754863590115435,0.5155503608126808,0.5754863590115435,0.497059829201155,0.4929219349135277
116
+ 9,7,0.49754026048532574,0.5976678460826522,0.5203447110751969,0.5754863590115435,0.5147705306792452,0.5754863590115435,0.49626096364939803,0.4929219349135277
117
+ 9,8,0.49678908222566853,0.5976678460826522,0.5198116568037813,0.5754863590115435,0.5142135547506604,0.5754863590115435,0.49530129464010336,0.4929219349135277
118
+ 9,9,0.4962790724827963,0.5976678460826522,0.5194156841957991,0.5754863590115435,0.5137845271553748,0.5754863590115435,0.4946662998060217,0.4707404478424189
119
+ 9,10,0.49608358249629053,0.5976678460826522,0.5192166758003558,0.5754863590115435,0.5135559405015376,0.5754863590115435,0.4943152922605989,0.4707404478424189
120
+ 9,11,0.4959880550190612,0.5976678460826522,0.5191008255491054,0.5754863590115435,0.51342668057616,0.5754863590115435,0.49415031278663635,0.4707404478424189
121
+ 9,-1,0.4959880550190612,0.5976678460826522,0.5191008255491054,0.5754863590115435,0.51342668057616,0.5754863590115435,0.49415031278663635,0.4707404478424189
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/modules.json ADDED
@@ -0,0 +1,20 @@
+ [
+ {
+ "idx": 0,
+ "name": "0",
+ "path": "",
+ "type": "sentence_transformers.models.Transformer"
+ },
+ {
+ "idx": 1,
+ "name": "1",
+ "path": "1_Pooling",
+ "type": "sentence_transformers.models.Pooling"
+ },
+ {
+ "idx": 2,
+ "name": "2",
+ "path": "2_Dense",
+ "type": "sentence_transformers.models.Dense"
+ }
+ ]
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af0b0f4857532fde7968b328cb2ca01f25a8e1c933f5e8867858c6157067b6fb
+ size 669492273
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/sentence_bert_config.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "max_seq_length": 256,
+ "do_lower_case": false
+ }
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "bert-base-multilingual-uncased", "tokenizer_class": "BertTokenizer"}
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/10_Epochs/vocab.txt ADDED
The diff for this file is too large to render. See raw diff
 
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/1_Pooling/config.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "word_embedding_dimension": 768,
+ "pooling_mode_cls_token": false,
+ "pooling_mode_mean_tokens": true,
+ "pooling_mode_max_tokens": false,
+ "pooling_mode_mean_sqrt_len_tokens": false
+ }
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/2_Dense/config.json ADDED
@@ -0,0 +1 @@
+ {"in_features": 768, "out_features": 256, "bias": true, "activation_function": "torch.nn.modules.activation.Tanh"}
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/2_Dense/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:729b96a8087db23a505dd6b75539f8c065a733283874befcd8756113f3aeb554
+ size 788519
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/README.md ADDED
@@ -0,0 +1,87 @@
+ ---
+ pipeline_tag: sentence-similarity
+ tags:
+ - sentence-transformers
+ - feature-extraction
+ - sentence-similarity
+ ---
+
+ # {MODEL_NAME}
+
+ This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 256 dimensional dense vector space and can be used for tasks like clustering or semantic search.
+
+ <!--- Describe your model here -->
+
+ ## Usage (Sentence-Transformers)
+
+ Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
+
+ ```
+ pip install -U sentence-transformers
+ ```
+
+ Then you can use the model like this:
+
+ ```python
+ from sentence_transformers import SentenceTransformer
+ sentences = ["This is an example sentence", "Each sentence is converted"]
+
+ model = SentenceTransformer('{MODEL_NAME}')
+ embeddings = model.encode(sentences)
+ print(embeddings)
+ ```
+
+
+
+ ## Evaluation Results
+
+ <!--- Describe how your model was evaluated -->
+
+ For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name={MODEL_NAME})
+
+
+ ## Training
+ The model was trained with the parameters:
+
+ **DataLoader**:
+
+ `torch.utils.data.dataloader.DataLoader` of length 11 with parameters:
+ ```
+ {'batch_size': 15, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
+ ```
+
+ **Loss**:
+
+ `sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
+
+ Parameters of the fit()-Method:
+ ```
+ {
+ "epochs": 1,
+ "evaluation_steps": 1,
+ "evaluator": "sentence_transformers.evaluation.EmbeddingSimilarityEvaluator.EmbeddingSimilarityEvaluator",
+ "max_grad_norm": 1,
+ "optimizer_class": "<class 'transformers.optimization.AdamW'>",
+ "optimizer_params": {
+ "lr": 2e-05
+ },
+ "scheduler": "WarmupLinear",
+ "steps_per_epoch": null,
+ "warmup_steps": 2,
+ "weight_decay": 0.01
+ }
+ ```
+
+
+ ## Full Model Architecture
+ ```
+ SentenceTransformer(
+ (0): Transformer({'max_seq_length': 256, 'do_lower_case': False}) with Transformer model: BertModel
+ (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
+ (2): Dense({'in_features': 768, 'out_features': 256, 'bias': True, 'activation_function': 'torch.nn.modules.activation.Tanh'})
+ )
+ ```
+
+ ## Citing & Authors
+
+ <!--- Describe where people can find more information -->
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/config.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "_name_or_path": "bert-base-multilingual-uncased",
+ "architectures": [
+ "BertModel"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "classifier_dropout": null,
+ "directionality": "bidi",
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-12,
+ "max_position_embeddings": 512,
+ "model_type": "bert",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "pad_token_id": 0,
+ "pooler_fc_size": 768,
+ "pooler_num_attention_heads": 12,
+ "pooler_num_fc_layers": 3,
+ "pooler_size_per_head": 128,
+ "pooler_type": "first_token_transform",
+ "position_embedding_type": "absolute",
+ "torch_dtype": "float32",
+ "transformers_version": "4.18.0",
+ "type_vocab_size": 2,
+ "use_cache": true,
+ "vocab_size": 105879
+ }
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/config_sentence_transformers.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "__version__": {
+ "sentence_transformers": "2.2.0",
+ "transformers": "4.18.0",
+ "pytorch": "1.11.0+cpu"
+ }
+ }
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/eval/similarity_evaluation_results.csv ADDED
@@ -0,0 +1,13 @@
+ epoch,steps,cosine_pearson,cosine_spearman,euclidean_pearson,euclidean_spearman,manhattan_pearson,manhattan_spearman,dot_pearson,dot_spearman
+ 0,1,0.5959615105801458,0.5064772881236497,0.5228185957687582,0.3770852802088487,0.5252796271556653,0.37585297537156487,0.3656436364037719,0.30068238029725186
+ 0,2,0.5981535868545533,0.5200326413337716,0.5242416698867759,0.37215606085971337,0.524238654740314,0.4165190350019308,0.38138210931870387,0.36352992699872666
+ 0,3,0.5973291070551235,0.5483756525912995,0.5247035251258387,0.43500360756118817,0.523348008681698,0.47197275267970273,0.4501905550218964,0.43007438821205285
+ 0,4,0.6282994713252422,0.5841124928725302,0.562700000931956,0.4892250204016762,0.5608092292569061,0.4818311913779733,0.48070626858213195,0.4596497043068645
+ 0,5,0.6474778083623832,0.5989001509199361,0.6025758394435041,0.5151034219846364,0.5976024892172822,0.5298910800320422,0.4917403004961885,0.4584173994695807
+ 0,6,0.6484546200136605,0.5631633106387054,0.6494936012474827,0.5964355412453685,0.6384853052450463,0.5274264703574746,0.44903482902365793,0.43377130272390435
+ 0,7,0.6493645858512357,0.5964355412453685,0.652332526462173,0.6149201138046257,0.6376911091481744,0.5533048719404349,0.5098575465282467,0.4978511542626629
+ 0,8,0.6279096502597487,0.613687808967342,0.6325123233233435,0.5693248348251244,0.623688263302,0.5274264703574746,0.542343196149572,0.4966188494253791
+ 0,9,0.6422348500591497,0.613687808967342,0.6513380386206455,0.6247785525028963,0.6346093536793497,0.5496079574285834,0.5536814340656011,0.5027803736117983
+ 0,10,0.6326273192313862,0.627243162177464,0.6462049016538769,0.6161524186419096,0.6313819517043631,0.5533048719404349,0.5453938384166883,0.504012678449082
+ 0,11,0.6218699417316709,0.5693248348251244,0.640019470832415,0.5964355412453685,0.6261231080703131,0.5533048719404349,0.5316339865286299,0.4929219349135277
+ 0,-1,0.6218699417316709,0.5693248348251244,0.640019470832415,0.5964355412453685,0.6261231080703131,0.5533048719404349,0.5316339865286299,0.4929219349135277
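Each row of this CSV is one call of the EmbeddingSimilarityEvaluator during fit() (steps = -1 marks the end of an epoch), reporting Pearson and Spearman correlations for cosine, Euclidean, Manhattan and dot-product similarities. A small, hedged way to pull out the best evaluation step with pandas (the path is a placeholder):

```python
import pandas as pd

# Placeholder local path to this file
df = pd.read_csv("1_Epochs/eval/similarity_evaluation_results.csv")
best = df.loc[df["cosine_spearman"].idxmax()]
print(int(best["epoch"]), int(best["steps"]), best["cosine_spearman"])
```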
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/modules.json ADDED
@@ -0,0 +1,20 @@
+ [
+ {
+ "idx": 0,
+ "name": "0",
+ "path": "",
+ "type": "sentence_transformers.models.Transformer"
+ },
+ {
+ "idx": 1,
+ "name": "1",
+ "path": "1_Pooling",
+ "type": "sentence_transformers.models.Pooling"
+ },
+ {
+ "idx": 2,
+ "name": "2",
+ "path": "2_Dense",
+ "type": "sentence_transformers.models.Dense"
+ }
+ ]
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b474191ae95f5b629faf469577af2c3f80ac026502307e0abf195c32fdbbe739
+ size 669492273
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/sentence_bert_config.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "max_seq_length": 256,
+ "do_lower_case": false
+ }
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "bert-base-multilingual-uncased", "tokenizer_class": "BertTokenizer"}
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/1_Epochs/vocab.txt ADDED
The diff for this file is too large to render. See raw diff
 
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/30_Epochs/1_Pooling/config.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "word_embedding_dimension": 768,
+ "pooling_mode_cls_token": false,
+ "pooling_mode_mean_tokens": true,
+ "pooling_mode_max_tokens": false,
+ "pooling_mode_mean_sqrt_len_tokens": false
+ }
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/30_Epochs/2_Dense/config.json ADDED
@@ -0,0 +1 @@
+ {"in_features": 768, "out_features": 256, "bias": true, "activation_function": "torch.nn.modules.activation.Tanh"}
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/30_Epochs/2_Dense/pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9385f17ff7bae92f6f7682d3c806d33ac1b6bd04aa9d496f9c6f78d3a213f39e
+ size 788519
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/30_Epochs/README.md ADDED
@@ -0,0 +1,87 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
+ ---
+ pipeline_tag: sentence-similarity
+ tags:
+ - sentence-transformers
+ - feature-extraction
+ - sentence-similarity
+ ---
+
+ # {MODEL_NAME}
+
+ This is a [sentence-transformers](https://www.SBERT.net) model: it maps sentences & paragraphs to a 256-dimensional dense vector space and can be used for tasks like clustering or semantic search.
+
+ <!--- Describe your model here -->
+
+ ## Usage (Sentence-Transformers)
+
+ Using this model is easy once you have [sentence-transformers](https://www.SBERT.net) installed:
+
+ ```
+ pip install -U sentence-transformers
+ ```
+
+ Then you can use the model like this:
+
+ ```python
+ from sentence_transformers import SentenceTransformer
+ sentences = ["This is an example sentence", "Each sentence is converted"]
+
+ model = SentenceTransformer('{MODEL_NAME}')
+ embeddings = model.encode(sentences)
+ print(embeddings)
+ ```
+
+
+
+ ## Evaluation Results
+
+ <!--- Describe how your model was evaluated -->
+
+ For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name={MODEL_NAME})
+
+
+ ## Training
+ The model was trained with the following parameters:
+
+ **DataLoader**:
+
+ `torch.utils.data.dataloader.DataLoader` of length 11 with parameters:
+ ```
+ {'batch_size': 15, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
+ ```
+
+ **Loss**:
+
+ `sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
+
+ Parameters of the fit() method:
+ ```
+ {
+ "epochs": 30,
+ "evaluation_steps": 1,
+ "evaluator": "sentence_transformers.evaluation.EmbeddingSimilarityEvaluator.EmbeddingSimilarityEvaluator",
+ "max_grad_norm": 1,
+ "optimizer_class": "<class 'transformers.optimization.AdamW'>",
+ "optimizer_params": {
+ "lr": 2e-05
+ },
+ "scheduler": "WarmupLinear",
+ "steps_per_epoch": null,
+ "warmup_steps": 33,
+ "weight_decay": 0.01
+ }
+ ```
+
+
+ ## Full Model Architecture
+ ```
+ SentenceTransformer(
+ (0): Transformer({'max_seq_length': 256, 'do_lower_case': False}) with Transformer model: BertModel
+ (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
+ (2): Dense({'in_features': 768, 'out_features': 256, 'bias': True, 'activation_function': 'torch.nn.modules.activation.Tanh'})
+ )
+ ```
+
+ ## Citing & Authors
+
+ <!--- Describe where people can find more information -->
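To make the training section of this README concrete, here is a minimal, hypothetical sketch of the corresponding sentence-transformers calls. The sentence pairs, gold scores, and output path are placeholders invented for illustration (the repository does not ship this script); only the module layout and the fit() arguments mirror what the README reports.

```python
# Sketch of the training setup described in the README (placeholder data, illustrative only).
from torch import nn
from torch.utils.data import DataLoader
from sentence_transformers import InputExample, SentenceTransformer, losses, models
from sentence_transformers.evaluation import EmbeddingSimilarityEvaluator

# Rebuild the architecture reported under "Full Model Architecture":
# BERT encoder -> mean pooling -> 768->256 Dense head with Tanh.
word = models.Transformer("bert-base-multilingual-uncased", max_seq_length=256)
pool = models.Pooling(word.get_word_embedding_dimension(), pooling_mode="mean")
dense = models.Dense(in_features=768, out_features=256, activation_function=nn.Tanh())
model = SentenceTransformer(modules=[word, pool, dense])

# Placeholder sentence pairs with gold similarity labels in [0, 1].
train_examples = [
    InputExample(texts=["respuesta del estudiante", "respuesta de referencia"], label=0.8),
    InputExample(texts=["otra respuesta", "respuesta de referencia"], label=0.3),
]
train_dataloader = DataLoader(train_examples, shuffle=True, batch_size=15)  # batch_size from the README
train_loss = losses.CosineSimilarityLoss(model)

# Placeholder dev split for the evaluator that writes eval/similarity_evaluation_results.csv.
evaluator = EmbeddingSimilarityEvaluator(
    sentences1=["respuesta del estudiante", "otra respuesta"],
    sentences2=["respuesta de referencia", "respuesta de referencia"],
    scores=[0.8, 0.3],
)

model.fit(
    train_objectives=[(train_dataloader, train_loss)],
    evaluator=evaluator,
    epochs=30,
    evaluation_steps=1,
    scheduler="WarmupLinear",
    warmup_steps=33,
    optimizer_params={"lr": 2e-05},
    weight_decay=0.01,
    max_grad_norm=1,
    output_path="30_Epochs",  # hypothetical output directory
)
```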
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/30_Epochs/config.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "_name_or_path": "bert-base-multilingual-uncased",
+ "architectures": [
+ "BertModel"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "classifier_dropout": null,
+ "directionality": "bidi",
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-12,
+ "max_position_embeddings": 512,
+ "model_type": "bert",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "pad_token_id": 0,
+ "pooler_fc_size": 768,
+ "pooler_num_attention_heads": 12,
+ "pooler_num_fc_layers": 3,
+ "pooler_size_per_head": 128,
+ "pooler_type": "first_token_transform",
+ "position_embedding_type": "absolute",
+ "torch_dtype": "float32",
+ "transformers_version": "4.18.0",
+ "type_vocab_size": 2,
+ "use_cache": true,
+ "vocab_size": 105879
+ }
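The backbone config above is the stock `bert-base-multilingual-uncased` configuration (12 layers, 12 attention heads, hidden size 768, vocabulary of 105,879 word pieces), written out by transformers 4.18.0. As a minimal sketch, inspecting or loading the raw encoder from a local copy of this 30_Epochs directory (the path below is a placeholder) would look like:

```python
# Illustrative only: load the BERT backbone described by this config.json from a local copy.
from transformers import AutoConfig, AutoModel

model_dir = "Model_bert-base-multilingual-uncased/30_Epochs"  # placeholder local path
config = AutoConfig.from_pretrained(model_dir)
print(config.num_hidden_layers, config.num_attention_heads, config.hidden_size, config.vocab_size)
# -> 12 12 768 105879

backbone = AutoModel.from_pretrained(model_dir)  # instantiates BertModel from pytorch_model.bin
```

In practice the complete model (pooling and Dense head included) would instead be loaded with `SentenceTransformer(model_dir)`, which reads modules.json; the snippet above only inspects the raw encoder.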
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/30_Epochs/config_sentence_transformers.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "__version__": {
+ "sentence_transformers": "2.2.0",
+ "transformers": "4.18.0",
+ "pytorch": "1.11.0+cpu"
+ }
+ }
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/30_Epochs/eval/similarity_evaluation_results.csv ADDED
@@ -0,0 +1,361 @@
1
+ epoch,steps,cosine_pearson,cosine_spearman,euclidean_pearson,euclidean_spearman,manhattan_pearson,manhattan_spearman,dot_pearson,dot_spearman
2
+ 0,1,0.48529711552308574,0.38078219472070013,0.44962528426065407,0.3536714883004561,0.4583858700909903,0.33272230606663117,0.3869374670168732,0.2895916367616975
3
+ 0,2,0.48717804995465797,0.38078219472070013,0.4515827866146165,0.3536714883004561,0.4605188692909378,0.33765152541576643,0.388095454262142,0.2895916367616975
4
+ 0,3,0.49039101344385044,0.38078219472070013,0.45501492010235356,0.3536714883004561,0.4643039386393474,0.33765152541576643,0.39018239203731486,0.2895916367616975
5
+ 0,4,0.49366479670622143,0.38078219472070013,0.4586967171927639,0.3536714883004561,0.4684821843705163,0.33765152541576643,0.3933181796949744,0.2895916367616975
6
+ 0,5,0.49777142456914436,0.3931052430935383,0.4633549893902452,0.3450453544394694,0.47427407116653614,0.33765152541576643,0.39741657945868836,0.30930851415823857
7
+ 0,6,0.5021549542086592,0.3857114140698354,0.4683208294486994,0.3413484399276179,0.48110902132212596,0.36229762216144284,0.40265579945825136,0.3167023431819415
8
+ 0,7,0.5085260960933932,0.374620670534281,0.47551635547259746,0.3536714883004561,0.4904338653572067,0.3844791092325516,0.409823011090317,0.3167023431819415
9
+ 0,8,0.5165122968979814,0.4263774737002014,0.4846936807937925,0.3770852802088487,0.5006788609635354,0.3844791092325516,0.41878914749756646,0.3167023431819415
10
+ 0,9,0.5235807268042675,0.4571850946322969,0.4932027037162094,0.39064063341897065,0.5098281972152537,0.4263774737002014,0.42825611697539456,0.32902539155477967
11
+ 0,10,0.52946106959775,0.4374682172357558,0.5010251865601574,0.42514516886291764,0.5193434665478103,0.4423974365848911,0.4390760814387304,0.320399257693793
12
+ 0,11,0.5387197202223395,0.4177513398392147,0.512743830543602,0.4436297414221749,0.5325143413756296,0.4781342768661218,0.45064661199177003,0.3586007076495914
13
+ 0,-1,0.5387197202223395,0.4177513398392147,0.512743830543602,0.4436297414221749,0.5325143413756296,0.4781342768661218,0.45064661199177003,0.3586007076495914
14
+ 1,1,0.5449354080606865,0.44486204625945874,0.5221738071564141,0.4436297414221749,0.5404162384985577,0.4793665817034057,0.46329172993601975,0.34750996411403695
15
+ 1,2,0.5515413626722339,0.49168963007624383,0.5325121852554815,0.47566966719155424,0.5483296841525782,0.4818311913779733,0.47152562529793046,0.3857114140698354
16
+ 1,3,0.5546177980490679,0.47197275267970273,0.5417088238954171,0.47566966719155424,0.5562923221614183,0.46334661881871597,0.47597538535866213,0.4239128640256338
17
+ 1,4,0.548356646688428,0.46457892365599984,0.5439371768681275,0.4596497043068645,0.5575303743109596,0.47197275267970273,0.46967040439020347,0.41898364467649857
18
+ 1,5,0.5397137454525891,0.4128221204900794,0.5418360897722265,0.4485589607713102,0.5552778176177972,0.46457892365599984,0.4674855508371061,0.43500360756118817
19
+ 1,6,0.5300583123185931,0.4313066930493367,0.537354759589473,0.4485589607713102,0.5522035153341999,0.4362359123984719,0.4669079728912656,0.4399328269103234
20
+ 1,7,0.5338377997300303,0.40912520597822793,0.5356818909179053,0.4325389978866205,0.5515670295132963,0.4362359123984719,0.49002964182347725,0.45472048495772927
21
+ 1,8,0.5520137937643196,0.4325389978866205,0.5402061648033505,0.43377130272390435,0.5569181036311481,0.4423974365848911,0.5442415194342456,0.49415423975081146
22
+ 1,9,0.5614223241898332,0.4670435333305674,0.543583635985733,0.47566966719155424,0.5596089573752014,0.4966188494253791,0.5711280203593978,0.46088200914414834
23
+ 1,10,0.55173687827034,0.4966188494253791,0.5418026511687549,0.49045732523896,0.5584368529873147,0.5397495187303128,0.5208023004070511,0.5052449832863658
24
+ 1,11,0.5134524211021072,0.4276097785374852,0.5243660945515266,0.46457892365599984,0.5420229781715091,0.48306349621525707,0.39581168991411814,0.3536714883004561
25
+ 1,-1,0.5134524211021072,0.4276097785374852,0.5243660945515266,0.46457892365599984,0.5420229781715091,0.48306349621525707,0.39581168991411814,0.3536714883004561
26
+ 2,1,0.4763948897715039,0.37954988988341626,0.5023404608765445,0.4732050575169865,0.5210874100844225,0.4621143139814322,0.3226739480223658,0.30191468513453573
27
+ 2,2,0.4447694082838155,0.37954988988341626,0.4788238276861986,0.44116513174760724,0.49865039935885425,0.4115898156527956,0.27900667899652415,0.17375498205701848
28
+ 2,3,0.41719824683036877,0.3586007076495914,0.45658253484236533,0.40296368179180886,0.4765198748589813,0.40296368179180886,0.24522921714120832,0.17375498205701848
29
+ 2,4,0.39500284984972456,0.3240961722056444,0.43660159133355686,0.40296368179180886,0.4549394897001766,0.37585297537156487,0.22541450795853468,0.17375498205701848
30
+ 2,5,0.38809531173955525,0.32902539155477967,0.4280132170474755,0.40912520597822793,0.44631295627252393,0.4436297414221749,0.22910430452202535,0.17375498205701848
31
+ 2,6,0.4077730588380947,0.3844791092325516,0.4422812122442838,0.42514516886291764,0.45996978386636794,0.47443736235427036,0.2576132599465963,0.19593646912812723
32
+ 2,7,0.41869129428101926,0.39926676727995736,0.44528268037556823,0.4707404478424189,0.4645736412799295,0.5101742026355011,0.2861432793987968,0.22058256587380362
33
+ 2,8,0.43358316925786,0.39187293825625447,0.44888405197517056,0.5459110429167319,0.47166348768655497,0.5286587751947585,0.3243992790136815,0.2945208561108328
34
+ 2,9,0.4702945952020354,0.5434464332421642,0.4690402693192187,0.571789444499692,0.4955100077924944,0.5693248348251244,0.38943400271937145,0.3931052430935383
35
+ 2,10,0.5086479198349083,0.6432631250621536,0.4913082387535552,0.5878094073843817,0.5193938786331342,0.5631633106387054,0.4611121690638544,0.5594663961268539
36
+ 2,11,0.5284280316594965,0.6099908944554905,0.5025595641738465,0.6025970654317876,0.529704163326234,0.5779509686861112,0.49398759241953033,0.579183273523395
37
+ 2,-1,0.5284280316594965,0.6099908944554905,0.5025595641738465,0.6025970654317876,0.529704163326234,0.5779509686861112,0.49398759241953033,0.579183273523395
38
+ 3,1,0.5445337400965623,0.55823409128957,0.51000040331441,0.5606987009641377,0.5349239435656523,0.5471433477540156,0.5236116760159346,0.5754863590115435
39
+ 3,2,0.5495072162384804,0.5643956154759892,0.5135043816575403,0.5680925299878407,0.5355275885717149,0.5422141284048805,0.5373448316257564,0.524961860682907
40
+ 3,3,0.5468113325737158,0.5360526042184613,0.5154238162900978,0.538517213893029,0.5349048101269802,0.5459110429167319,0.5397336407567275,0.5015480687745144
41
+ 3,4,0.5563427162243643,0.5089418977982173,0.5334414671822545,0.5508402622658671,0.5526279141997721,0.5261941655201908,0.5447527721836363,0.4867604107271085
42
+ 3,5,0.541995377557389,0.49045732523896,0.5382796613427108,0.5360526042184613,0.5568293410756527,0.5138711171473526,0.5185626088067399,0.5175680316592041
43
+ 3,6,0.5278161166060588,0.4781342768661218,0.5402746234334093,0.5557694816150024,0.5585132356787681,0.5422141284048805,0.4928205156391864,0.4707404478424189
44
+ 3,7,0.5261305426591256,0.4423974365848911,0.5510154055129053,0.5557694816150024,0.569929865896769,0.5422141284048805,0.47419315881436375,0.4732050575169865
45
+ 3,8,0.5353607254132768,0.5224972510083393,0.567427238535954,0.5668602251505568,0.5873424604103037,0.5545371767777186,0.4584243349248577,0.43870052207303967
46
+ 3,9,0.5381276839089757,0.4978511542626629,0.5810781169678512,0.5668602251505568,0.6005333365602645,0.5668602251505568,0.44137942566770777,0.4263774737002014
47
+ 3,10,0.5127819028657166,0.4966188494253791,0.5722651475755638,0.5496079574285834,0.5910007642692446,0.5496079574285834,0.42406755962628795,0.41035751081551175
48
+ 3,11,0.44508287353312603,0.4362359123984719,0.5147972090554311,0.5360526042184613,0.5292919538414292,0.49168963007624383,0.39297333793603473,0.40173137695452504
49
+ 3,-1,0.44508287353312603,0.4362359123984719,0.5147972090554311,0.5360526042184613,0.5292919538414292,0.49168963007624383,0.39297333793603473,0.40173137695452504
50
+ 4,1,0.39771527031041987,0.41528673016464707,0.4624845364370852,0.47197275267970273,0.4707338125524364,0.4473266559340263,0.37185342985300096,0.3573684028123076
51
+ 4,2,0.33656742354667313,0.3561360979750237,0.3852689535958004,0.39187293825625447,0.3868050983281545,0.3869437189071192,0.3420164100163199,0.37215606085971337
52
+ 4,3,0.2940975744752773,0.3450453544394694,0.32693988461365636,0.32902539155477967,0.32435661737117905,0.3228638673683606,0.31714371162061605,0.3450453544394694
53
+ 4,4,0.2946403382007048,0.3586007076495914,0.31935684444252965,0.2895916367616975,0.3153725831146269,0.28096550290071076,0.3202790886119258,0.374620670534281
54
+ 4,5,0.33358342117538153,0.3586007076495914,0.3502419147613105,0.33025769639206354,0.3458133257967267,0.33025769639206354,0.3566527902452539,0.37092375602242955
55
+ 4,6,0.4342153318647408,0.4263774737002014,0.43140933532803444,0.3869437189071192,0.4279538424063538,0.4239128640256338,0.4438935352815474,0.4793665817034057
56
+ 4,7,0.5392868078310784,0.5237295558456231,0.5192954343295637,0.4165190350019308,0.5249919239559697,0.4670435333305674,0.49987520320580614,0.5619310058014215
57
+ 4,8,0.5282579420894538,0.5533048719404349,0.5598316990388552,0.5952032364080846,0.5892092290185124,0.6568184782722756,0.39301437478433704,0.5397495187303128
58
+ 4,9,0.4428225464516429,0.46950814300513505,0.5112141221751499,0.5742540541742597,0.5500758560382982,0.6087585896182066,0.2746956158133486,0.21688565136195218
59
+ 4,10,0.4031097860004174,0.2698747593651564,0.4760669141630619,0.48059888654068944,0.5150626508544052,0.5989001509199361,0.22998532886116435,0.1922395546162758
60
+ 4,11,0.4027976065695224,0.285894722249846,0.47530673864694817,0.4423974365848911,0.5068426803335039,0.5693248348251244,0.2331622426922148,0.17498728689430232
61
+ 4,-1,0.4027976065695224,0.285894722249846,0.47530673864694817,0.4423974365848911,0.5068426803335039,0.5693248348251244,0.2331622426922148,0.17498728689430232
62
+ 5,1,0.4197815428676828,0.285894722249846,0.490271398882717,0.48552810588982476,0.5137402539050611,0.5742540541742597,0.2621911315351123,0.17498728689430232
63
+ 5,2,0.4706470438525433,0.4596497043068645,0.5422710390126622,0.5606987009641377,0.5578559398938804,0.6087585896182066,0.31865842391546373,0.1675934578705994
64
+ 5,3,0.5246777419213438,0.5434464332421642,0.5968748642730898,0.579183273523395,0.604493893996696,0.6025970654317876,0.3690549014228985,0.1675934578705994
65
+ 5,4,0.5856006532802479,0.579183273523395,0.6426718533672469,0.6087585896182066,0.6454544306181418,0.6358692960384507,0.44329945063254717,0.3770852802088487
66
+ 5,5,0.6361957129868221,0.613687808967342,0.6675218760770847,0.6629800024586947,0.669585106487246,0.6851614895298034,0.5239893083804855,0.531123384869326
67
+ 5,6,0.6591281906821019,0.6592830879468432,0.6596821910404318,0.6038293702690715,0.6661674668484896,0.6173847234791934,0.5855396564711297,0.5619310058014215
68
+ 5,7,0.6577228785602653,0.6186170283164771,0.639502458232335,0.5890417122216656,0.6502262705645203,0.6186170283164771,0.6061804266535913,0.5397495187303128
69
+ 5,8,0.6514769052884082,0.6087585896182066,0.620056063291267,0.571789444499692,0.6349913284521937,0.5976678460826522,0.6117902893350435,0.55823409128957
70
+ 5,9,0.6515029596377546,0.6235462476656125,0.6116667509652602,0.5298910800320422,0.6284611378741713,0.5976678460826522,0.6081387474614781,0.55823409128957
71
+ 5,10,0.6502041797040701,0.6099908944554905,0.6040450852759389,0.5052449832863658,0.6220166628046253,0.5890417122216656,0.6069761883978928,0.5705571396624082
72
+ 5,11,0.6693496266303586,0.6383339057130183,0.6167126605066766,0.55823409128957,0.6336507721010656,0.5878094073843817,0.5975381789975847,0.5902740170589494
73
+ 5,-1,0.6693496266303586,0.6383339057130183,0.6167126605066766,0.55823409128957,0.6336507721010656,0.5878094073843817,0.5975381789975847,0.5902740170589494
74
+ 6,1,0.6736924483593192,0.6383339057130183,0.6158668121776789,0.5631633106387054,0.6322073529213942,0.6087585896182066,0.594662278661538,0.606293979943639
75
+ 6,2,0.6812011658256713,0.6383339057130183,0.6201243969742077,0.5742540541742597,0.6351889456745462,0.6087585896182066,0.5885481895317839,0.6444954298994374
76
+ 6,3,0.6808498804545307,0.6173847234791934,0.620924691564066,0.5693248348251244,0.6361129975128427,0.6087585896182066,0.5886782434259991,0.6444954298994374
77
+ 6,4,0.678702653088179,0.633404686363883,0.6200989239163153,0.5643956154759892,0.6360242129830647,0.6087585896182066,0.586966963661415,0.6223139428283286
78
+ 6,5,0.6449525559050504,0.6099908944554905,0.5861684420795986,0.5298910800320422,0.6054671487568033,0.531123384869326,0.588525216544618,0.5890417122216656
79
+ 6,6,0.6183729061916988,0.5878094073843817,0.5624373927897274,0.4534881801204454,0.5844780156815539,0.45225587528316163,0.5848940420190996,0.5853447977098141
80
+ 6,7,0.5965858401753046,0.5878094073843817,0.545707907928656,0.4534881801204454,0.5696216880378586,0.47566966719155424,0.5679277650796066,0.5779509686861112
81
+ 6,8,0.5735648877995514,0.5434464332421642,0.5253042572066255,0.4485589607713102,0.5501256848651377,0.4436297414221749,0.5432129307615188,0.5409818235675966
82
+ 6,9,0.5763630570227898,0.5557694816150024,0.5270906827405788,0.4485589607713102,0.551594512976662,0.47443736235427036,0.5122435361202204,0.5496079574285834
83
+ 6,10,0.6021044499025092,0.5668602251505568,0.5505503634412722,0.5126388123100688,0.572133123492167,0.5471433477540156,0.46929257490405335,0.4978511542626629
84
+ 6,11,0.6183053657378672,0.5693248348251244,0.5704694067133306,0.5298910800320422,0.5895646256660879,0.5422141284048805,0.4303531632592794,0.47566966719155424
85
+ 6,-1,0.6183053657378672,0.5693248348251244,0.5704694067133306,0.5298910800320422,0.5895646256660879,0.5422141284048805,0.4303531632592794,0.47566966719155424
86
+ 7,1,0.6226782939904674,0.5989001509199361,0.5832814533279762,0.5298910800320422,0.6007177690435928,0.5853447977098141,0.39897691590721923,0.4473266559340263
87
+ 7,2,0.6206034684934495,0.6087585896182066,0.585912237638623,0.565627920313273,0.6035638879087051,0.565627920313273,0.3854654421068829,0.40296368179180886
88
+ 7,3,0.617221687005862,0.6087585896182066,0.5874372963652885,0.565627920313273,0.6050680317227884,0.565627920313273,0.376548095563892,0.3462776592767532
89
+ 7,4,0.6056017057805645,0.6087585896182066,0.5848055781839239,0.5557694816150024,0.6045115411223895,0.5779509686861112,0.38142597103926296,0.40296368179180886
90
+ 7,5,0.5866193666763118,0.5915063218962332,0.5601957305241864,0.5779509686861112,0.583559742900134,0.5767186638488274,0.40167445109871314,0.4682758381678513
91
+ 7,6,0.5638044844226601,0.5483756525912995,0.5397603688406619,0.5730217493369759,0.5658951128226162,0.5964355412453685,0.4249503286308365,0.48552810588982476
92
+ 7,7,0.5284885105987116,0.48552810588982476,0.5099671051003709,0.4436297414221749,0.536899938320651,0.5557694816150024,0.4395341657539514,0.4596497043068645
93
+ 7,8,0.49863548851980843,0.5323556897066098,0.4869260674085234,0.4793665817034057,0.5134814524680715,0.47197275267970273,0.4376372511690384,0.4399328269103234
94
+ 7,9,0.4948349666978195,0.5224972510083393,0.4874054689501326,0.46334661881871597,0.5146646682088938,0.504012678449082,0.426213263156338,0.4399328269103234
95
+ 7,10,0.5166076596691227,0.45472048495772927,0.5084441251173065,0.5175680316592041,0.5356528951545129,0.544678738079448,0.4029109228282831,0.4423974365848911
96
+ 7,11,0.5273344603027958,0.524961860682907,0.5231512604736955,0.5126388123100688,0.5497031137538453,0.5200326413337716,0.3815456206945138,0.4078929011409442
97
+ 7,-1,0.5273344603027958,0.524961860682907,0.5231512604736955,0.5126388123100688,0.5497031137538453,0.5200326413337716,0.3815456206945138,0.4078929011409442
98
+ 8,1,0.5303624954357862,0.5545371767777186,0.5281638694411466,0.531123384869326,0.5525481864148072,0.5422141284048805,0.36217763331709496,0.37092375602242955
99
+ 8,2,0.5326578596365688,0.531123384869326,0.5311180793463159,0.5594663961268539,0.5541634903877999,0.5298910800320422,0.35059013197374905,0.3549037931377399
100
+ 8,3,0.533379199253232,0.531123384869326,0.5312770305942673,0.5545371767777186,0.5528425494291045,0.5298910800320422,0.34736975050882446,0.3536714883004561
101
+ 8,4,0.5335576268864328,0.5163357268219203,0.5296954537761875,0.5335879945438936,0.5499377806203062,0.5298910800320422,0.35086631110193656,0.3536714883004561
102
+ 8,5,0.5277733856778991,0.5089418977982173,0.5220368500899923,0.5335879945438936,0.5416897337200146,0.5298910800320422,0.3515679348707215,0.3733883656969972
103
+ 8,6,0.5221977023376533,0.5286587751947585,0.5155798729656705,0.5138711171473526,0.5353147567435957,0.5298910800320422,0.35517253378653696,0.34011613509033406
104
+ 8,7,0.5114716879114066,0.4842958010525409,0.5063781414568272,0.4374682172357558,0.5267978778876554,0.5237295558456231,0.36574315415373965,0.3228638673683606
105
+ 8,8,0.49488814700957084,0.4929219349135277,0.4929113443049856,0.4288420833747691,0.5146855777181061,0.476901972028838,0.37279954677861493,0.34381304960218556
106
+ 8,9,0.47964977724170754,0.4239128640256338,0.4811925834538394,0.4263774737002014,0.5036396193612623,0.4202159495137823,0.3753214495681651,0.3672268415105781
107
+ 8,10,0.46921145523461133,0.39803446244267354,0.47457322546668135,0.4263774737002014,0.4965283131683261,0.4460943510967425,0.37562037224212635,0.3647622318360105
108
+ 8,11,0.47768290721265694,0.37092375602242955,0.4788277890998054,0.39803446244267354,0.4992672138874489,0.40049907211724123,0.37294216189440266,0.3857114140698354
109
+ 8,-1,0.47768290721265694,0.37092375602242955,0.4788277890998054,0.39803446244267354,0.4992672138874489,0.40049907211724123,0.37294216189440266,0.3857114140698354
110
+ 9,1,0.48769047413866307,0.4226805591883499,0.48395370854573305,0.39803446244267354,0.5030818779846524,0.44979126560859395,0.3683411310400144,0.42144825435106614
111
+ 9,2,0.49572407877456903,0.476901972028838,0.48895168858681626,0.4226805591883499,0.5059844857138701,0.5027803736117983,0.3687115964504733,0.42514516886291764
112
+ 9,3,0.5012985608430838,0.476901972028838,0.49324888822154445,0.4990834590999468,0.5089111682359179,0.5027803736117983,0.3711487078030165,0.40542829146637654
113
+ 9,4,0.48735006749887383,0.47197275267970273,0.48386759406120305,0.44486204625945874,0.4998158731651568,0.48552810588982476,0.3760667144776701,0.42514516886291764
114
+ 9,5,0.47995098383361134,0.4423974365848911,0.47932622147211246,0.43500360756118817,0.49589803873250926,0.48552810588982476,0.384043032246428,0.40049907211724123
115
+ 9,6,0.4694607817962274,0.43500360756118817,0.47270154484076454,0.43870052207303967,0.4897263103037543,0.4485589607713102,0.3917650596883699,0.39926676727995736
116
+ 9,7,0.45527008507992517,0.43870052207303967,0.46371639391802427,0.4226805591883499,0.48143939098054334,0.45225587528316163,0.39764405427421906,0.361065317324159
117
+ 9,8,0.4464411087869352,0.43007438821205285,0.458123318777454,0.43007438821205285,0.47574225526655584,0.45225587528316163,0.404905830296325,0.40173137695452504
118
+ 9,9,0.4420950433810391,0.43007438821205285,0.45564634022148814,0.43007438821205285,0.47292492656371676,0.44979126560859395,0.41284649738922063,0.39433754793082215
119
+ 9,10,0.4632560938765059,0.43007438821205285,0.47000447995045813,0.44979126560859395,0.4863687655397081,0.44486204625945874,0.4292107242992318,0.40296368179180886
120
+ 9,11,0.48138664846147705,0.44979126560859395,0.48162131120517626,0.45472048495772927,0.49749290409826336,0.47443736235427036,0.4493083242344721,0.4202159495137823
121
+ 9,-1,0.48138664846147705,0.44979126560859395,0.48162131120517626,0.45472048495772927,0.49749290409826336,0.47443736235427036,0.4493083242344721,0.4202159495137823
122
+ 10,1,0.4960607833658969,0.45472048495772927,0.490548889662635,0.504012678449082,0.5059175880840663,0.4818311913779733,0.46319550367157547,0.4202159495137823
123
+ 10,2,0.5123732583160698,0.4842958010525409,0.49786833617574144,0.49168963007624383,0.5114997596998341,0.5126388123100688,0.4729551147625569,0.4399328269103234
124
+ 10,3,0.5249237896278235,0.5212649461710555,0.5023652173501241,0.5126388123100688,0.5143802158545006,0.5594663961268539,0.47929130751392257,0.45225587528316163
125
+ 10,4,0.5243136222233947,0.5212649461710555,0.4987749291491579,0.5027803736117983,0.5105332044426388,0.5397495187303128,0.47184137772233403,0.44486204625945874
126
+ 10,5,0.5161533266322103,0.5212649461710555,0.49391463665548263,0.5089418977982173,0.5063641566909003,0.5298910800320422,0.4585001825665812,0.4325389978866205
127
+ 10,6,0.5062424433000628,0.5212649461710555,0.488988254782881,0.5286587751947585,0.5025830715546286,0.5298910800320422,0.44494409776377947,0.4165190350019308
128
+ 10,7,0.48712700891364413,0.4288420833747691,0.4792769521511705,0.504012678449082,0.4948553559270067,0.47197275267970273,0.4271485641108955,0.3844791092325516
129
+ 10,8,0.46823235091146,0.4078929011409442,0.4673753607546798,0.4596497043068645,0.48382370199739216,0.47443736235427036,0.41097416288285976,0.3647622318360105
130
+ 10,9,0.45287773045477486,0.39803446244267354,0.4549958005096014,0.4177513398392147,0.4713741101195609,0.47443736235427036,0.3955963568572257,0.3573684028123076
131
+ 10,10,0.44020280757739233,0.39803446244267354,0.4439685105753835,0.4128221204900794,0.45963425769521715,0.4534881801204454,0.38158377086217243,0.35243918346317227
132
+ 10,11,0.434873223058116,0.39803446244267354,0.4338382566474878,0.4128221204900794,0.4490969226331099,0.4325389978866205,0.37086728545275177,0.35983301248687516
133
+ 10,-1,0.434873223058116,0.39803446244267354,0.4338382566474878,0.4128221204900794,0.4490969226331099,0.4325389978866205,0.37086728545275177,0.35983301248687516
134
+ 11,1,0.43561518166823504,0.4128221204900794,0.42738325495050766,0.4325389978866205,0.44232998421535585,0.4325389978866205,0.3609830890578213,0.3770852802088487
135
+ 11,2,0.43640706136288043,0.4128221204900794,0.42272757146924467,0.4128221204900794,0.43771278051919116,0.4362359123984719,0.35222464643669005,0.34381304960218556
136
+ 11,3,0.43900370406643385,0.4128221204900794,0.420264768639684,0.4325389978866205,0.4352349724559844,0.4362359123984719,0.3458661328023862,0.3672268415105781
137
+ 11,4,0.4464105765946631,0.4177513398392147,0.4215010570169908,0.4128221204900794,0.43626047532123774,0.4362359123984719,0.3428427147959308,0.39187293825625447
138
+ 11,5,0.4537438872612261,0.41405442532736325,0.4238252095954256,0.4177513398392147,0.4384334071133244,0.4571850946322969,0.3409687065951028,0.388176023744403
139
+ 11,6,0.46117639576981717,0.45472048495772927,0.42645778534352297,0.43007438821205285,0.44087506720299,0.4534881801204454,0.3413198552763194,0.38324680439526776
140
+ 11,7,0.4640260051748497,0.45472048495772927,0.4271979863784436,0.43007438821205285,0.44194975012102095,0.4534881801204454,0.34365641775923517,0.38324680439526776
141
+ 11,8,0.46274181646960805,0.45472048495772927,0.42513569119593775,0.45472048495772927,0.4404946937523576,0.4534881801204454,0.3458108939602147,0.38324680439526776
142
+ 11,9,0.4539034422457203,0.43870052207303967,0.41842761137999707,0.41405442532736325,0.43497517749483916,0.4534881801204454,0.34881449196812636,0.4202159495137823
143
+ 11,10,0.4411911656453227,0.4177513398392147,0.4094640916215729,0.4177513398392147,0.42673841143551744,0.4571850946322969,0.3520591452940951,0.4239128640256338
144
+ 11,11,0.41898915452833807,0.4165190350019308,0.3978008831839796,0.4165190350019308,0.4158952937833843,0.4362359123984719,0.3473213629402587,0.34750996411403695
145
+ 11,-1,0.41898915452833807,0.4165190350019308,0.3978008831839796,0.4165190350019308,0.4158952937833843,0.4362359123984719,0.3473213629402587,0.34750996411403695
146
+ 12,1,0.3969379315652891,0.44116513174760724,0.3857812378512335,0.44116513174760724,0.40484319125267987,0.4362359123984719,0.34276168100459614,0.3080762093209548
147
+ 12,2,0.3888096499675908,0.4078929011409442,0.3826343287666506,0.40542829146637654,0.40230488941479464,0.4202159495137823,0.34389645521861284,0.3450453544394694
148
+ 12,3,0.38861231762710796,0.4165190350019308,0.3848441022838075,0.40049907211724123,0.40499984681983486,0.4202159495137823,0.34787778958129956,0.34750996411403695
149
+ 12,4,0.3946074355410476,0.42514516886291764,0.3904592849530967,0.4325389978866205,0.41107943660066043,0.4362359123984719,0.35321579498300576,0.34750996411403695
150
+ 12,5,0.40646939227682893,0.4436297414221749,0.39861716390033286,0.4325389978866205,0.4195580320875901,0.4325389978866205,0.36143558797224207,0.34750996411403695
151
+ 12,6,0.415391573189963,0.4423974365848911,0.40404704170515604,0.4226805591883499,0.42453120118138965,0.4226805591883499,0.3686818519516784,0.34750996411403695
152
+ 12,7,0.41793762041600196,0.4423974365848911,0.40612803745591913,0.4226805591883499,0.4257942394168518,0.4226805591883499,0.3729915961799825,0.37585297537156487
153
+ 12,8,0.42106671200952916,0.4423974365848911,0.4079336693982376,0.4276097785374852,0.4268512686485475,0.4226805591883499,0.377505605031068,0.37585297537156487
154
+ 12,9,0.42458481977325413,0.4325389978866205,0.4088097741218747,0.4276097785374852,0.42726434204538233,0.4276097785374852,0.3813992211101561,0.3549037931377399
155
+ 12,10,0.42858164190895576,0.4325389978866205,0.41021437918218145,0.4276097785374852,0.4283939837907833,0.4276097785374852,0.3848350980948506,0.37215606085971337
156
+ 12,11,0.4265332623692707,0.4325389978866205,0.40863386869149454,0.4276097785374852,0.42660656795212926,0.4226805591883499,0.3855978431960536,0.4041959866290927
157
+ 12,-1,0.4265332623692707,0.4325389978866205,0.40863386869149454,0.4276097785374852,0.42660656795212926,0.4226805591883499,0.3855978431960536,0.4041959866290927
158
+ 13,1,0.42972596746017877,0.4325389978866205,0.4108320233897158,0.4276097785374852,0.42872191895982836,0.4276097785374852,0.3901646574848284,0.4041959866290927
159
+ 13,2,0.4315521834488351,0.4325389978866205,0.41331753132437854,0.4276097785374852,0.4310142801105522,0.4226805591883499,0.3914313238416848,0.43377130272390435
160
+ 13,3,0.441608278248776,0.4374682172357558,0.41995452210709117,0.4276097785374852,0.4374922001751374,0.4436297414221749,0.3956249519571555,0.4399328269103234
161
+ 13,4,0.4510672703378669,0.4276097785374852,0.427443224123312,0.4682758381678513,0.44440715090596083,0.4436297414221749,0.3984885827085817,0.45225587528316163
162
+ 13,5,0.46109524582682815,0.4436297414221749,0.43515025964752585,0.46457892365599984,0.45141898398654406,0.46457892365599984,0.400509188994379,0.4325389978866205
163
+ 13,6,0.46521147018174985,0.4682758381678513,0.4384991019233305,0.46457892365599984,0.45431493696760095,0.5015480687745144,0.3998508698781658,0.42144825435106614
164
+ 13,7,0.4681343622395834,0.46457892365599984,0.44166899327896336,0.46457892365599984,0.4571327141866045,0.5015480687745144,0.3994150484731604,0.42144825435106614
165
+ 13,8,0.46498442990217315,0.4436297414221749,0.44196208706490314,0.4436297414221749,0.4573976550388202,0.48059888654068944,0.3978586046067648,0.42514516886291764
166
+ 13,9,0.46037229749848907,0.4374682172357558,0.44097404587145606,0.4436297414221749,0.4565343390008158,0.48059888654068944,0.39602756270488876,0.42514516886291764
167
+ 13,10,0.45802945684845175,0.4374682172357558,0.43938146883798257,0.4276097785374852,0.45502288201443825,0.4436297414221749,0.39351210153265687,0.42514516886291764
168
+ 13,11,0.4582612384203979,0.4374682172357558,0.4396647476771204,0.4374682172357558,0.4550486692791864,0.43870052207303967,0.3892025493983536,0.42514516886291764
169
+ 13,-1,0.4582612384203979,0.4374682172357558,0.4396647476771204,0.4374682172357558,0.4550486692791864,0.43870052207303967,0.3892025493983536,0.42514516886291764
170
+ 14,1,0.45579426045457844,0.4325389978866205,0.43902408446119623,0.4325389978866205,0.45385533223115926,0.4325389978866205,0.38494719560239876,0.42514516886291764
171
+ 14,2,0.4554167387537307,0.4325389978866205,0.4392437524477536,0.4325389978866205,0.453945478554834,0.4325389978866205,0.38281268794604717,0.388176023744403
172
+ 14,3,0.46297666320416503,0.4374682172357558,0.4439245162155586,0.4325389978866205,0.4584613133053343,0.4325389978866205,0.3825959116719051,0.388176023744403
173
+ 14,4,0.4686826431852479,0.4374682172357558,0.4483442503960922,0.4325389978866205,0.4628265650595696,0.4485589607713102,0.3825755167242705,0.40049907211724123
174
+ 14,5,0.4759255042230717,0.4781342768661218,0.45376329403186366,0.4325389978866205,0.46800277225319187,0.4485589607713102,0.3811144737178189,0.4202159495137823
175
+ 14,6,0.4811168825703731,0.48552810588982476,0.4579061226552646,0.4534881801204454,0.4715469009907835,0.4534881801204454,0.3799274399875436,0.4202159495137823
176
+ 14,7,0.4835254367235538,0.4818311913779733,0.46005286740974005,0.4534881801204454,0.4735343358210831,0.4534881801204454,0.3792358455577175,0.4202159495137823
177
+ 14,8,0.4800947661055337,0.4781342768661218,0.4576875382722047,0.4325389978866205,0.4718399804275636,0.4584173994695807,0.37756203297810736,0.4202159495137823
178
+ 14,9,0.47564553111808416,0.4534881801204454,0.4545390107273902,0.4423974365848911,0.4694724133759295,0.4584173994695807,0.3767560033622522,0.40049907211724123
179
+ 14,10,0.4708341958823986,0.4423974365848911,0.4509198080705795,0.4423974365848911,0.46648639327683367,0.4423974365848911,0.37571513888411645,0.40049907211724123
180
+ 14,11,0.46631740430686486,0.4423974365848911,0.44774391998524293,0.4423974365848911,0.46405757771816447,0.4423974365848911,0.375037043365363,0.40049907211724123
181
+ 14,-1,0.46631740430686486,0.4423974365848911,0.44774391998524293,0.4423974365848911,0.46405757771816447,0.4423974365848911,0.375037043365363,0.40049907211724123
182
+ 15,1,0.46382485737379503,0.4423974365848911,0.4455750357254916,0.4423974365848911,0.46226955008766635,0.4423974365848911,0.3742001035536935,0.40049907211724123
183
+ 15,2,0.4622822178951847,0.4423974365848911,0.44385543193647914,0.4423974365848911,0.4607155065431054,0.4423974365848911,0.3739167940728608,0.3844791092325516
184
+ 15,3,0.4608965680109538,0.4423974365848911,0.4423514527721062,0.4423974365848911,0.45935923401329615,0.4423974365848911,0.37383192730879067,0.3844791092325516
185
+ 15,4,0.4605915303091211,0.4423974365848911,0.44069344922162196,0.4423974365848911,0.4577421888889528,0.4423974365848911,0.37316797473779,0.3844791092325516
186
+ 15,5,0.461417395969182,0.45225587528316163,0.4396019315523507,0.4473266559340263,0.45657566768627184,0.4584173994695807,0.37344834072370997,0.39187293825625447
187
+ 15,6,0.4685247030548835,0.4682758381678513,0.4424078094820914,0.45225587528316163,0.458839926532568,0.4670435333305674,0.3762406037662174,0.4276097785374852
188
+ 15,7,0.4751171795648763,0.4867604107271085,0.4445527559408198,0.4892250204016762,0.46033767937880754,0.4879927155643924,0.377634001252522,0.4473266559340263
189
+ 15,8,0.4784151949398362,0.4707404478424189,0.4450356053207588,0.4793665817034057,0.4603551617292075,0.49538654458809533,0.3781069040789122,0.4473266559340263
190
+ 15,9,0.48043854545755876,0.4707404478424189,0.44509119598184244,0.4707404478424189,0.45995530877582214,0.49538654458809533,0.3778813805742123,0.4510235704458778
191
+ 15,10,0.4802602976095749,0.47443736235427036,0.4437235017310967,0.4707404478424189,0.4582389931812909,0.4990834590999468,0.37629679988580456,0.4510235704458778
192
+ 15,11,0.4813597762595649,0.47443736235427036,0.4442975999046291,0.4707404478424189,0.458469450304974,0.4990834590999468,0.3796618797948188,0.4510235704458778
193
+ 15,-1,0.4813597762595649,0.47443736235427036,0.4442975999046291,0.4707404478424189,0.458469450304974,0.4990834590999468,0.3796618797948188,0.4510235704458778
194
+ 16,1,0.48154081865456483,0.4707404478424189,0.44498744657628686,0.4707404478424189,0.4591245724564455,0.49538654458809533,0.3830233582330619,0.4510235704458778
195
+ 16,2,0.4802748556577301,0.48059888654068944,0.4454257725374736,0.4732050575169865,0.4596929652479568,0.49538654458809533,0.3856635087686395,0.4571850946322969
196
+ 16,3,0.47792652590497603,0.48059888654068944,0.4451799269036696,0.4485589607713102,0.459620497244831,0.46334661881871597,0.3878263849584396,0.44116513174760724
197
+ 16,4,0.4741048469470409,0.4485589607713102,0.4436195237580434,0.4485589607713102,0.458201545820656,0.46334661881871597,0.3887025531207796,0.44116513174760724
198
+ 16,5,0.46624718010214694,0.4276097785374852,0.44033605654711583,0.4313066930493367,0.45533533974737206,0.4584173994695807,0.3892025172488786,0.4423974365848911
199
+ 16,6,0.45666587766422706,0.4313066930493367,0.43597624625332154,0.4313066930493367,0.4511208406298831,0.4460943510967425,0.3884112045980195,0.4128221204900794
200
+ 16,7,0.4487359682443066,0.4362359123984719,0.4320676747273246,0.4362359123984719,0.44739155088325233,0.4460943510967425,0.3868589475652658,0.4263774737002014
201
+ 16,8,0.4491959368626342,0.4313066930493367,0.43291447481873063,0.4313066930493367,0.4482215591813201,0.4263774737002014,0.3873466192775074,0.4263774737002014
202
+ 16,9,0.45051146962807714,0.4313066930493367,0.4348948610936464,0.4313066930493367,0.45028325747051456,0.4263774737002014,0.38966153511858076,0.4460943510967425
203
+ 16,10,0.4553014380115291,0.45225587528316163,0.4390301913337607,0.4313066930493367,0.45438113199956276,0.4263774737002014,0.39339170508496235,0.4423974365848911
204
+ 16,11,0.46108074997823356,0.4263774737002014,0.4408919719641726,0.4263774737002014,0.4562627405219182,0.43870052207303967,0.392993888349958,0.4288420833747691
205
+ 16,-1,0.46108074997823356,0.4263774737002014,0.4408919719641726,0.4263774737002014,0.4562627405219182,0.43870052207303967,0.392993888349958,0.4288420833747691
206
+ 17,1,0.46348920430934093,0.4263774737002014,0.44115473354045087,0.4263774737002014,0.456406804341833,0.43870052207303967,0.3921141272708046,0.4534881801204454
207
+ 17,2,0.46606839352816876,0.4226805591883499,0.44142178523277753,0.4226805591883499,0.4565636643174031,0.43870052207303967,0.3907535117062706,0.4670435333305674
208
+ 17,3,0.46958366782013145,0.43500360756118817,0.44210901328210794,0.4226805591883499,0.4571252473563006,0.4436297414221749,0.3889986287016795,0.44979126560859395
209
+ 17,4,0.47129412653638797,0.44979126560859395,0.44091229178603725,0.4177513398392147,0.4557389052772306,0.4510235704458778,0.38582179981930603,0.44979126560859395
210
+ 17,5,0.4687819399072341,0.44979126560859395,0.4375609874376749,0.44979126560859395,0.4521763439351025,0.47566966719155424,0.38168237981287056,0.44979126560859395
211
+ 17,6,0.4661918377877693,0.44979126560859395,0.4351354207745108,0.42514516886291764,0.449735228197131,0.44116513174760724,0.3787580966940286,0.44979126560859395
212
+ 17,7,0.4641842089036326,0.44979126560859395,0.43282136561151474,0.42514516886291764,0.447468075325645,0.44116513174760724,0.37510530026507916,0.4399328269103234
213
+ 17,8,0.4619692638440081,0.44979126560859395,0.4304689578118267,0.42514516886291764,0.4452658914956129,0.44116513174760724,0.3712941398427611,0.4399328269103234
214
+ 17,9,0.4605656016214299,0.44979126560859395,0.42888855271083437,0.42514516886291764,0.44389873329076046,0.44116513174760724,0.36734540744536814,0.4399328269103234
215
+ 17,10,0.4585385454907716,0.44979126560859395,0.42758975766672536,0.42514516886291764,0.4427749898462053,0.44116513174760724,0.36419618530406306,0.4202159495137823
216
+ 17,11,0.45553292952764535,0.42514516886291764,0.42564097158009034,0.42144825435106614,0.4409791567111483,0.44116513174760724,0.36110222838388306,0.4202159495137823
217
+ 17,-1,0.45553292952764535,0.42514516886291764,0.42564097158009034,0.42144825435106614,0.4409791567111483,0.44116513174760724,0.36110222838388306,0.4202159495137823
218
+ 18,1,0.4527133305952763,0.42514516886291764,0.4238059808826188,0.42144825435106614,0.43920213566777494,0.43377130272390435,0.3582931873219889,0.4239128640256338
219
+ 18,2,0.45164406003083435,0.42514516886291764,0.4230322991709976,0.42144825435106614,0.438436710027121,0.43377130272390435,0.3565912796036571,0.4239128640256338
220
+ 18,3,0.4512393084401073,0.42514516886291764,0.42241203337588984,0.42144825435106614,0.4376824230389371,0.43377130272390435,0.3556931060597256,0.4239128640256338
221
+ 18,4,0.4522838079178768,0.42514516886291764,0.42242327629637433,0.42144825435106614,0.4374812611179237,0.44116513174760724,0.35652371661027904,0.4239128640256338
222
+ 18,5,0.45099375681112336,0.42514516886291764,0.42122795856281564,0.42144825435106614,0.43608061010140947,0.44116513174760724,0.35742112546605154,0.4239128640256338
223
+ 18,6,0.4466623438798476,0.42514516886291764,0.4175617417549494,0.42144825435106614,0.4319802134945872,0.44116513174760724,0.356436133622369,0.4239128640256338
224
+ 18,7,0.4409772917742574,0.4288420833747691,0.4131045978849413,0.42144825435106614,0.42702809803496966,0.4534881801204454,0.35524077589848857,0.4239128640256338
225
+ 18,8,0.4396590874757564,0.42144825435106614,0.4118653672859208,0.42144825435106614,0.42542228639956164,0.46088200914414834,0.3566931198107368,0.4239128640256338
226
+ 18,9,0.4408571683572625,0.42144825435106614,0.41348089446642333,0.42144825435106614,0.42676330688466374,0.4534881801204454,0.3607328985355273,0.4239128640256338
227
+ 18,10,0.44364641124952897,0.42144825435106614,0.41631584643015557,0.42144825435106614,0.4293993091310374,0.4534881801204454,0.3645528323738206,0.43377130272390435
228
+ 18,11,0.4464421868079609,0.42144825435106614,0.4205265804881998,0.42144825435106614,0.43349111121212375,0.4534881801204454,0.3703033120715371,0.4177513398392147
229
+ 18,-1,0.4464421868079609,0.42144825435106614,0.4205265804881998,0.42144825435106614,0.43349111121212375,0.4534881801204454,0.3703033120715371,0.4177513398392147
230
+ 19,1,0.44982847149666316,0.42144825435106614,0.42527608136856476,0.42144825435106614,0.4383572815404385,0.46334661881871597,0.3761653320608477,0.4177513398392147
231
+ 19,2,0.44969300940541557,0.4263774737002014,0.4277448789404409,0.42144825435106614,0.44101140137258754,0.46334661881871597,0.3806042903982694,0.3968021576053897
232
+ 19,3,0.4513454999485417,0.4263774737002014,0.4314294830881116,0.4263774737002014,0.4446951313460771,0.46334661881871597,0.38570185272556823,0.41405442532736325
233
+ 19,4,0.4494368167337588,0.4263774737002014,0.43293322789344946,0.4263774737002014,0.4460707643101096,0.4460943510967425,0.3892507779004252,0.40912520597822793
234
+ 19,5,0.4487721021064075,0.4263774737002014,0.43500692180543704,0.4263774737002014,0.44805564901297124,0.4460943510967425,0.393277869375069,0.41898364467649857
235
+ 19,6,0.44872020019912207,0.4263774737002014,0.4363957817273334,0.4263774737002014,0.4491295844163627,0.4460943510967425,0.39606363144942436,0.40296368179180886
236
+ 19,7,0.45027942684575206,0.4263774737002014,0.4382261827297135,0.4263774737002014,0.4506474813545588,0.4460943510967425,0.3982590368804238,0.40296368179180886
237
+ 19,8,0.45192644982745084,0.4263774737002014,0.439851632493537,0.4263774737002014,0.4518640367697789,0.4621143139814322,0.40033913516311714,0.41898364467649857
238
+ 19,9,0.4547375808569255,0.4263774737002014,0.44117931379101094,0.4263774737002014,0.4525749109856887,0.4621143139814322,0.4019025710627768,0.41898364467649857
239
+ 19,10,0.45979299457303285,0.4263774737002014,0.4440388318186733,0.4263774737002014,0.45473843787029883,0.46334661881871597,0.40467002803935553,0.41898364467649857
240
+ 19,11,0.4678440358157503,0.4313066930493367,0.4472764324443083,0.4436297414221749,0.45717935737186965,0.46334661881871597,0.4061977656878145,0.41898364467649857
241
+ 19,-1,0.4678440358157503,0.4313066930493367,0.4472764324443083,0.4436297414221749,0.45717935737186965,0.46334661881871597,0.4061977656878145,0.41898364467649857
242
+ 20,1,0.4732165025877772,0.4276097785374852,0.4492676403789167,0.4584173994695807,0.45831938612802203,0.4707404478424189,0.4073218274613699,0.41405442532736325
243
+ 20,2,0.4776715755038687,0.4510235704458778,0.4503734982918466,0.46581122849328366,0.45871655704711223,0.49538654458809533,0.40803295719140004,0.45472048495772927
244
+ 20,3,0.4810777995190253,0.47566966719155424,0.4511379138397378,0.46581122849328366,0.4589637125472023,0.48552810588982476,0.40800322856996085,0.45472048495772927
245
+ 20,4,0.48298895627975313,0.46581122849328366,0.45121741809482585,0.46581122849328366,0.4586204851618237,0.4892250204016762,0.40770245933526617,0.45472048495772927
246
+ 20,5,0.4829674103211304,0.46581122849328366,0.45043306113719417,0.46581122849328366,0.45747226876319674,0.4892250204016762,0.4060748488235856,0.45472048495772927
247
+ 20,6,0.47955895395354997,0.46581122849328366,0.44866686325858174,0.46581122849328366,0.4555243254141206,0.4892250204016762,0.40370195012660604,0.45472048495772927
248
+ 20,7,0.47548794197183086,0.46581122849328366,0.4466309889177322,0.46581122849328366,0.4532951143170756,0.48552810588982476,0.40154308562853447,0.43870052207303967
249
+ 20,8,0.4702549434991903,0.45225587528316163,0.44405232967213726,0.46581122849328366,0.45064440030792624,0.48552810588982476,0.3989353627252337,0.43870052207303967
250
+ 20,9,0.46565045726764653,0.4313066930493367,0.4425644388473421,0.46581122849328366,0.44931509921496593,0.48552810588982476,0.3977611156603027,0.4239128640256338
251
+ 20,10,0.4613553080724099,0.4313066930493367,0.44190713200383575,0.4584173994695807,0.4489514730034653,0.49538654458809533,0.39744825238143194,0.40296368179180886
252
+ 20,11,0.4590889251071732,0.4263774737002014,0.4414475294295112,0.42144825435106614,0.4485383247395962,0.46334661881871597,0.3969685769002545,0.40296368179180886
253
+ 20,-1,0.4590889251071732,0.4263774737002014,0.4414475294295112,0.42144825435106614,0.4485383247395962,0.46334661881871597,0.3969685769002545,0.40296368179180886
254
+ 21,1,0.4561550139332289,0.4263774737002014,0.44126741020110016,0.4313066930493367,0.44872441280796826,0.46334661881871597,0.3963265165327526,0.40296368179180886
255
+ 21,2,0.4560342645563441,0.4263774737002014,0.44282472350390034,0.4263774737002014,0.4506618056285012,0.46334661881871597,0.3979776674046466,0.40296368179180886
256
+ 21,3,0.4591113332818121,0.4263774737002014,0.4458324564548014,0.4263774737002014,0.45388165413537324,0.46334661881871597,0.4007793602400119,0.40296368179180886
257
+ 21,4,0.46068463324003506,0.4263774737002014,0.4486692424554124,0.4263774737002014,0.4573049322068964,0.4584173994695807,0.4028579744306931,0.4226805591883499
258
+ 21,5,0.4609367572672423,0.4263774737002014,0.4504352434028437,0.4263774737002014,0.4595983172433038,0.4584173994695807,0.4041158330904916,0.4226805591883499
259
+ 21,6,0.46063841550172036,0.4263774737002014,0.45126289666458164,0.4263774737002014,0.46083117624602654,0.4621143139814322,0.4045332501834856,0.4226805591883499
260
+ 21,7,0.45977502537191606,0.4263774737002014,0.45158665320122116,0.4263774737002014,0.4614841052536316,0.4621143139814322,0.4047910838502766,0.4226805591883499
261
+ 21,8,0.4590260110887493,0.4263774737002014,0.45143378151223956,0.4263774737002014,0.46154637525503583,0.4460943510967425,0.40479207782184945,0.4226805591883499
262
+ 21,9,0.4570711878493755,0.4263774737002014,0.4502867828318552,0.4263774737002014,0.46062463858998043,0.4460943510967425,0.4038550748305567,0.4226805591883499
263
+ 21,10,0.45754739109219045,0.4263774737002014,0.4503048477556718,0.4263774737002014,0.46071659510328505,0.4460943510967425,0.4039676194606289,0.4226805591883499
264
+ 21,11,0.4600245845253883,0.4263774737002014,0.45153965840351606,0.4263774737002014,0.4616790975372609,0.4460943510967425,0.4044403630230989,0.4226805591883499
265
+ 21,-1,0.4600245845253883,0.4263774737002014,0.45153965840351606,0.4263774737002014,0.4616790975372609,0.4460943510967425,0.4044403630230989,0.4226805591883499
266
+ 22,1,0.4635891959067143,0.4263774737002014,0.453245216341097,0.4263774737002014,0.46305160670432377,0.4584173994695807,0.40547172495608685,0.4226805591883499
267
+ 22,2,0.4667501669532427,0.4263774737002014,0.45446341930588546,0.4263774737002014,0.46394491784382563,0.4584173994695807,0.40614268942353443,0.4226805591883499
268
+ 22,3,0.47013260992834693,0.4263774737002014,0.45568499702598625,0.4226805591883499,0.464751728316839,0.4584173994695807,0.40690421799267473,0.43870052207303967
269
+ 22,4,0.4728077171411577,0.4263774737002014,0.4565553539185235,0.4226805591883499,0.4652380136072091,0.4584173994695807,0.40746091020315467,0.43870052207303967
270
+ 22,5,0.4739362631193595,0.4226805591883499,0.45637306464864313,0.4226805591883499,0.4646293838335282,0.43870052207303967,0.4071113791476273,0.43870052207303967
271
+ 22,6,0.47490834135414345,0.4226805591883499,0.4560277572683534,0.43870052207303967,0.4638843131858177,0.43870052207303967,0.4067276740397048,0.43500360756118817
272
+ 22,7,0.4755778991068949,0.4226805591883499,0.4554430239406575,0.43870052207303967,0.46297780575406783,0.46581122849328366,0.4063221394867439,0.43500360756118817
273
+ 22,8,0.47416483777284685,0.4226805591883499,0.4540553222299426,0.43870052207303967,0.46132513337805503,0.46581122849328366,0.40486950811661804,0.43500360756118817
274
+ 22,9,0.4722299443102834,0.4226805591883499,0.4529086895465986,0.43870052207303967,0.4599784249524389,0.4584173994695807,0.4033392823868331,0.43500360756118817
275
+ 22,10,0.4711607067528245,0.4226805591883499,0.4522109239344016,0.4226805591883499,0.45910431997419304,0.4584173994695807,0.40206140185608186,0.43500360756118817
276
+ 22,11,0.4706132171819881,0.4226805591883499,0.45179577424615236,0.4226805591883499,0.45850081235876755,0.4584173994695807,0.40101763629157283,0.43500360756118817
277
+ 22,-1,0.4706132171819881,0.4226805591883499,0.45179577424615236,0.4226805591883499,0.45850081235876755,0.4584173994695807,0.40101763629157283,0.43500360756118817
278
+ 23,1,0.46920772535229305,0.4226805591883499,0.4506218769141855,0.4226805591883499,0.4571058516288118,0.4584173994695807,0.3996611919037277,0.42514516886291764
279
+ 23,2,0.4695762232066892,0.4226805591883499,0.45062681415349004,0.4226805591883499,0.45690590222952454,0.4584173994695807,0.3991955526743272,0.42514516886291764
280
+ 23,3,0.47031805569883234,0.4226805591883499,0.4508631134081583,0.4226805591883499,0.4569230446338842,0.4584173994695807,0.3991633663369757,0.42514516886291764
281
+ 23,4,0.47098364098977297,0.4226805591883499,0.45124810425614015,0.4226805591883499,0.45715260796581636,0.4584173994695807,0.3993013153425282,0.42514516886291764
282
+ 23,5,0.4729846513066905,0.4226805591883499,0.45241662317179215,0.4226805591883499,0.45814427816315473,0.4584173994695807,0.4003199227727539,0.42514516886291764
283
+ 23,6,0.474010699953588,0.4226805591883499,0.4532138499790105,0.4226805591883499,0.45884533639040304,0.4584173994695807,0.40077420860011426,0.42514516886291764
284
+ 23,7,0.47767126083206407,0.4226805591883499,0.4550523570911598,0.43870052207303967,0.46048002736270377,0.43870052207303967,0.40160623525235983,0.44486204625945874
285
+ 23,8,0.48055561234991834,0.4226805591883499,0.4564994296782141,0.43870052207303967,0.46170798610961533,0.43870052207303967,0.4022696820746231,0.44486204625945874
286
+ 23,9,0.48320754689490564,0.43870052207303967,0.4578256973708421,0.43870052207303967,0.46281196602662733,0.4436297414221749,0.40289572621979036,0.46950814300513505
287
+ 23,10,0.4853391829317347,0.4436297414221749,0.4588564134720468,0.43870052207303967,0.46362408661895693,0.4436297414221749,0.40340902839195225,0.46950814300513505
288
+ 23,11,0.48727890999258305,0.4436297414221749,0.4597176274639475,0.4436297414221749,0.4642826721029195,0.4436297414221749,0.40359448715080387,0.46950814300513505
289
+ 23,-1,0.48727890999258305,0.4436297414221749,0.4597176274639475,0.4436297414221749,0.4642826721029195,0.4436297414221749,0.40359448715080387,0.46950814300513505
290
+ 24,1,0.4874370071199752,0.4436297414221749,0.45993736179555267,0.4436297414221749,0.46446141752242986,0.4436297414221749,0.4035143343784849,0.46950814300513505
291
+ 24,2,0.48672131640808947,0.4436297414221749,0.4598877986583521,0.43870052207303967,0.4643829559635688,0.4436297414221749,0.4036003850469644,0.46950814300513505
292
+ 24,3,0.4856465344817647,0.4276097785374852,0.4596690917107371,0.43870052207303967,0.46409400930206723,0.43870052207303967,0.4039883907593523,0.44486204625945874
293
+ 24,4,0.4835579688860715,0.4226805591883499,0.4588081319934939,0.43870052207303967,0.46314081821991193,0.43870052207303967,0.40408779289443425,0.45472048495772927
294
+ 24,5,0.48092534015112015,0.4226805591883499,0.45759847499349393,0.43870052207303967,0.4619269819576082,0.43870052207303967,0.4038371021760394,0.43500360756118817
295
+ 24,6,0.47873363112974443,0.4226805591883499,0.45651072597760756,0.4226805591883499,0.46084830116066716,0.43870052207303967,0.4037062927635484,0.43500360756118817
296
+ 24,7,0.47729394049456936,0.4226805591883499,0.45574650653404775,0.4226805591883499,0.4600582336413894,0.43870052207303967,0.4036242061951156,0.43500360756118817
297
+ 24,8,0.4753891850906258,0.4263774737002014,0.4547047023199817,0.4226805591883499,0.4590073118760638,0.43870052207303967,0.4031600483765053,0.43870052207303967
298
+ 24,9,0.4731722632473682,0.4263774737002014,0.45341159066691583,0.4226805591883499,0.45769467069120207,0.43870052207303967,0.40243967503188577,0.43870052207303967
299
+ 24,10,0.47174585577855566,0.4263774737002014,0.4526084387468126,0.4226805591883499,0.4568324968027452,0.43870052207303967,0.40209055448076564,0.43870052207303967
300
+ 24,11,0.4788789402681896,0.4226805591883499,0.45656543324104315,0.4226805591883499,0.46048974221618855,0.43870052207303967,0.4045903321051564,0.43500360756118817
301
+ 24,-1,0.4788789402681896,0.4226805591883499,0.45656543324104315,0.4226805591883499,0.46048974221618855,0.43870052207303967,0.4045903321051564,0.43500360756118817
302
+ 25,1,0.48431399255441765,0.4226805591883499,0.45954235861282866,0.4436297414221749,0.46319569362287616,0.46334661881871597,0.40658658967801065,0.43500360756118817
303
+ 25,2,0.48705480107804566,0.4226805591883499,0.46102259894211706,0.4682758381678513,0.464418526121732,0.46334661881871597,0.4078334062349547,0.43500360756118817
304
+ 25,3,0.48645775277733116,0.4226805591883499,0.460657320999349,0.4682758381678513,0.4638676752910471,0.46334661881871597,0.4075400403560661,0.43500360756118817
305
+ 25,4,0.48536745428426714,0.4226805591883499,0.45995795105270354,0.4682758381678513,0.463028890855819,0.46334661881871597,0.40710636357392094,0.43500360756118817
306
+ 25,5,0.4845938156854232,0.4226805591883499,0.4594535884342218,0.4682758381678513,0.46240642795072195,0.4879927155643924,0.40695087147072634,0.43500360756118817
307
+ 25,6,0.48256850564271003,0.4226805591883499,0.4585535645090789,0.4682758381678513,0.461574476584032,0.46334661881871597,0.4066557314448661,0.43500360756118817
308
+ 25,7,0.47943078079425117,0.4226805591883499,0.45701509818066566,0.4436297414221749,0.46016535758042054,0.46334661881871597,0.40591213906900714,0.43870052207303967
309
+ 25,8,0.47627566605962873,0.4263774737002014,0.4555993001172792,0.4436297414221749,0.45889660478911426,0.46334661881871597,0.4054168475643217,0.43870052207303967
310
+ 25,9,0.47361844559259403,0.4263774737002014,0.4544177274777136,0.4226805591883499,0.45784673691166494,0.46334661881871597,0.4050311576599716,0.43870052207303967
311
+ 25,10,0.4716439182944492,0.4263774737002014,0.4537027143246377,0.4263774737002014,0.45727120460913573,0.4584173994695807,0.4050628860986293,0.43870052207303967
312
+ 25,11,0.46984046176810823,0.4263774737002014,0.45301811636032885,0.4263774737002014,0.45669774280214925,0.4584173994695807,0.4050598774784926,0.4226805591883499
313
+ 25,-1,0.46984046176810823,0.4263774737002014,0.45301811636032885,0.4263774737002014,0.45669774280214925,0.4584173994695807,0.4050598774784926,0.4226805591883499
314
+ 26,1,0.4686398939831746,0.4263774737002014,0.45274370325589003,0.4263774737002014,0.4565190723402513,0.4584173994695807,0.4049930757812582,0.4226805591883499
315
+ 26,2,0.4677615559243834,0.4263774737002014,0.45251097325116785,0.4263774737002014,0.4563757288080378,0.4584173994695807,0.4045090134769657,0.4226805591883499
316
+ 26,3,0.46757078282581194,0.4263774737002014,0.452473790515476,0.4263774737002014,0.456366301482888,0.4584173994695807,0.4042156796523881,0.4226805591883499
317
+ 26,4,0.4677798488170627,0.4263774737002014,0.45279408693686163,0.4263774737002014,0.45672074990841083,0.4584173994695807,0.40432398983770673,0.4226805591883499
318
+ 26,5,0.4680701935771076,0.4263774737002014,0.45341483513014497,0.4263774737002014,0.457395209472782,0.4621143139814322,0.4044849397285005,0.4226805591883499
319
+ 26,6,0.4681199312303484,0.4263774737002014,0.4541240282810822,0.4263774737002014,0.4581988391414302,0.4621143139814322,0.4049325553837905,0.4226805591883499
320
+ 26,7,0.4683676865551164,0.4263774737002014,0.45485288092751797,0.4263774737002014,0.45900349298510634,0.4621143139814322,0.4053828239060521,0.4226805591883499
321
+ 26,8,0.4682837739147857,0.4263774737002014,0.45539671743709487,0.4263774737002014,0.4596172653025511,0.4621143139814322,0.4057106167783243,0.4226805591883499
322
+ 26,9,0.46854898641751386,0.41035751081551175,0.4561501637832157,0.4263774737002014,0.46042422390946386,0.4621143139814322,0.4062599186486602,0.4226805591883499
323
+ 26,10,0.46905048172966063,0.41035751081551175,0.4568894054592002,0.4263774737002014,0.46118044234149635,0.4621143139814322,0.40678608787157877,0.4226805591883499
324
+ 26,11,0.475973293511789,0.4263774737002014,0.46122963833070446,0.4263774737002014,0.46532308541036027,0.4584173994695807,0.4097781265986629,0.4226805591883499
325
+ 26,-1,0.475973293511789,0.4263774737002014,0.46122963833070446,0.4263774737002014,0.46532308541036027,0.4584173994695807,0.4097781265986629,0.4226805591883499
326
+ 27,1,0.4819388063704525,0.4263774737002014,0.4649517877923738,0.4226805591883499,0.4689215884351454,0.4584173994695807,0.4123883892435465,0.43870052207303967
327
+ 27,2,0.4865540177276554,0.4263774737002014,0.46780926865060046,0.43870052207303967,0.471636481575962,0.4584173994695807,0.41448933657057785,0.43870052207303967
328
+ 27,3,0.4903014023873171,0.4226805591883499,0.47011165092690377,0.43870052207303967,0.4737568839062951,0.4584173994695807,0.41622115259870973,0.43870052207303967
329
+ 27,4,0.4931032880511955,0.4226805591883499,0.4717697730163813,0.43870052207303967,0.4752777493272452,0.46334661881871597,0.4174208222141952,0.43500360756118817
330
+ 27,5,0.49393788039540376,0.4226805591883499,0.4723890130614993,0.4436297414221749,0.47588225190788597,0.46334661881871597,0.41777531078447666,0.43500360756118817
331
+ 27,6,0.494070445786181,0.4226805591883499,0.47250026817375146,0.4682758381678513,0.47596929038111346,0.46334661881871597,0.4178570404933839,0.43500360756118817
+ 27,7,0.4935690633917491,0.4226805591883499,0.47241967236858695,0.4436297414221749,0.4759152685200707,0.46334661881871597,0.41786003055155085,0.43500360756118817
+ 27,8,0.49230506611493174,0.4226805591883499,0.47195896947037114,0.43870052207303967,0.47556729630071126,0.46334661881871597,0.4176133049496008,0.43870052207303967
+ 27,9,0.4907751335644307,0.4226805591883499,0.4711703563837993,0.4584173994695807,0.4748777065974168,0.4436297414221749,0.41715961633922766,0.43870052207303967
+ 27,10,0.48862109051257796,0.4263774737002014,0.47007069057278233,0.4423974365848911,0.4739045434203306,0.43870052207303967,0.41647984326732,0.43870052207303967
+ 27,11,0.48679639525610335,0.4263774737002014,0.4691369072822579,0.4423974365848911,0.4730625839080922,0.43870052207303967,0.4159117584750831,0.43870052207303967
+ 27,-1,0.48679639525610335,0.4263774737002014,0.4691369072822579,0.4423974365848911,0.4730625839080922,0.43870052207303967,0.4159117584750831,0.43870052207303967
+ 28,1,0.484945694587112,0.4460943510967425,0.46819020181456117,0.4423974365848911,0.47220029008704456,0.43870052207303967,0.4154048623043515,0.4226805591883499
+ 28,2,0.483085657784486,0.4460943510967425,0.4671709723873266,0.4423974365848911,0.47128730320521506,0.43870052207303967,0.41474102618877506,0.4226805591883499
+ 28,3,0.4814643142296744,0.4460943510967425,0.4662342226268317,0.4423974365848911,0.4704655469201971,0.43870052207303967,0.41413654004385564,0.4226805591883499
+ 28,4,0.47996041948922585,0.4460943510967425,0.4653280124771165,0.4460943510967425,0.46966924806083,0.43870052207303967,0.41347388611221003,0.4226805591883499
+ 28,5,0.47897143709626006,0.4460943510967425,0.46472883161951506,0.4460943510967425,0.46914263806335615,0.43870052207303967,0.41300220029224644,0.4226805591883499
+ 28,6,0.47793208563093614,0.4460943510967425,0.4640867263592889,0.4460943510967425,0.4685679370307464,0.43870052207303967,0.41246152297282046,0.4226805591883499
+ 28,7,0.4771164383023423,0.4460943510967425,0.4635279830709264,0.4460943510967425,0.4680430094261704,0.43870052207303967,0.4119689494453883,0.4226805591883499
+ 28,8,0.4764060649520727,0.4460943510967425,0.4630279460927092,0.4460943510967425,0.46756436307112803,0.43870052207303967,0.41151478691395443,0.4226805591883499
+ 28,9,0.47587213731328143,0.4460943510967425,0.4626746463514181,0.4460943510967425,0.46721530916802756,0.43870052207303967,0.41117495703780954,0.4226805591883499
+ 28,10,0.4755045206523389,0.4460943510967425,0.46246526895211404,0.4460943510967425,0.4670061537027512,0.43870052207303967,0.41095796389469497,0.4226805591883499
+ 28,11,0.4765312170501673,0.4460943510967425,0.46298662615709285,0.4460943510967425,0.4674869757584575,0.43870052207303967,0.41149330802177736,0.4226805591883499
+ 28,-1,0.4765312170501673,0.4460943510967425,0.46298662615709285,0.4460943510967425,0.4674869757584575,0.43870052207303967,0.41149330802177736,0.4226805591883499
+ 29,1,0.47750365987968335,0.4460943510967425,0.46348813265256195,0.4460943510967425,0.46796204158049115,0.43870052207303967,0.41196842054003036,0.4226805591883499
+ 29,2,0.4782125704415538,0.4460943510967425,0.4638831184459058,0.4460943510967425,0.4683423395636336,0.43870052207303967,0.4123037479798043,0.4226805591883499
+ 29,3,0.4788103779197085,0.4460943510967425,0.46418412829429656,0.4423974365848911,0.468623426990622,0.43870052207303967,0.41256409792563997,0.4226805591883499
+ 29,4,0.4792213887507013,0.4460943510967425,0.46442039447112904,0.4423974365848911,0.4688536197668134,0.43870052207303967,0.4127904388361994,0.4226805591883499
+ 29,5,0.4796563547666157,0.4460943510967425,0.46467200924160607,0.4423974365848911,0.46909803783036597,0.43870052207303967,0.4130477415396614,0.4226805591883499
+ 29,6,0.47996646624296646,0.4460943510967425,0.46487817100335693,0.4423974365848911,0.4693078027845499,0.43870052207303967,0.41324512207463027,0.4226805591883499
+ 29,7,0.48024040499623727,0.4460943510967425,0.4650574669010035,0.4423974365848911,0.4694832107832036,0.43870052207303967,0.4134118827511585,0.4226805591883499
+ 29,8,0.4802825786033299,0.4460943510967425,0.4650871887960608,0.4423974365848911,0.46951352487597225,0.43870052207303967,0.41345994843057865,0.4226805591883499
+ 29,9,0.4802634404938063,0.4460943510967425,0.46508660427572346,0.4423974365848911,0.46951718542350607,0.43870052207303967,0.41345440089742447,0.4226805591883499
+ 29,10,0.4802519399992996,0.4460943510967425,0.4650801552455548,0.4423974365848911,0.4695115494643359,0.43870052207303967,0.4134456600911062,0.4226805591883499
+ 29,11,0.4801984546623826,0.4460943510967425,0.46506154847902836,0.4423974365848911,0.46950224883145514,0.43870052207303967,0.41342602861439437,0.4226805591883499
+ 29,-1,0.4801984546623826,0.4460943510967425,0.46506154847902836,0.4423974365848911,0.46950224883145514,0.43870052207303967,0.41342602861439437,0.4226805591883499
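Note: the rows above are the tail of the training-time evaluation log for the 30_Epochs run. Each row records the epoch, the step (-1 marks the end-of-epoch evaluation), and Pearson/Spearman correlations between the gold similarity labels and the cosine, Euclidean, Manhattan and dot-product scores, as written by sentence-transformers' EmbeddingSimilarityEvaluator. Below is a minimal inspection sketch, assuming pandas is installed, the CSV header keeps the evaluator's default column names (epoch, steps, cosine_pearson, cosine_spearman, ...), and the repository is checked out locally; it is an illustration, not part of the committed code.

# Minimal sketch: rank epochs of the 30_Epochs run by cosine Spearman correlation.
import pandas as pd

# Path taken from the file added in this commit; adjust to your checkout location.
csv_path = ("codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/"
            "Model_bert-base-multilingual-uncased/30_Epochs/eval/"
            "similarity_evaluation_results.csv")

df = pd.read_csv(csv_path)
# steps == -1 marks the end-of-epoch evaluation rows.
end_of_epoch = df[df["steps"] == -1]
print(end_of_epoch.sort_values("cosine_spearman", ascending=False).head())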
codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/Model_bert-base-multilingual-uncased/30_Epochs/modules.json ADDED
@@ -0,0 +1,20 @@
+ [
+ {
+ "idx": 0,
+ "name": "0",
+ "path": "",
+ "type": "sentence_transformers.models.Transformer"
+ },
+ {
+ "idx": 1,
+ "name": "1",
+ "path": "1_Pooling",
+ "type": "sentence_transformers.models.Pooling"
+ },
+ {
+ "idx": 2,
+ "name": "2",
+ "path": "2_Dense",
+ "type": "sentence_transformers.models.Dense"
+ }
+ ]
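Note: modules.json only declares the order of the pipeline stages (Transformer -> Pooling -> Dense); the actual weights and layer sizes live in the sibling folders added in this commit (1_Pooling/config.json, 2_Dense/config.json and the root transformer files). A minimal usage sketch, assuming sentence-transformers is installed and the repository is checked out locally; the example sentences are placeholders:

# SentenceTransformer reads modules.json from the saved directory and rebuilds
# the Transformer -> Pooling -> Dense stack declared above.
from sentence_transformers import SentenceTransformer

model_dir = ("codeScripts/Dependencies/BERT-models/Prueba3/Prueba_anterior/"
             "Model_bert-base-multilingual-uncased/30_Epochs")
model = SentenceTransformer(model_dir)

# encode() returns one embedding per input sentence.
embeddings = model.encode(["a sample answer", "another sample answer"])
print(embeddings.shape)

The same three-stage pipeline could also be built programmatically with sentence_transformers.models.Transformer, models.Pooling and models.Dense passed to SentenceTransformer(modules=[...]); saving such a model is what produces this modules.json layout.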