Add new SentenceTransformer model.

- .gitattributes +1 -0
- 1_Pooling/config.json +10 -0
- README.md +419 -0
- config.json +29 -0
- config_sentence_transformers.json +10 -0
- model.safetensors +3 -0
- modules.json +14 -0
- sentence_bert_config.json +4 -0
- sentencepiece.bpe.model +3 -0
- special_tokens_map.json +51 -0
- tokenizer.json +3 -0
- tokenizer_config.json +61 -0
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
1_Pooling/config.json ADDED
@@ -0,0 +1,10 @@
{
  "word_embedding_dimension": 768,
  "pooling_mode_cls_token": false,
  "pooling_mode_mean_tokens": true,
  "pooling_mode_max_tokens": false,
  "pooling_mode_mean_sqrt_len_tokens": false,
  "pooling_mode_weightedmean_tokens": false,
  "pooling_mode_lasttoken": false,
  "include_prompt": true
}
README.md ADDED
@@ -0,0 +1,419 @@
---
base_model: sentence-transformers/paraphrase-multilingual-mpnet-base-v2
datasets: []
language: []
library_name: sentence-transformers
pipeline_tag: sentence-similarity
tags:
- sentence-transformers
- sentence-similarity
- feature-extraction
- generated_from_trainer
- dataset_size:11165
- loss:BatchAllTripletLoss
widget:
- source_sentence: This Contract shall protect the Company for any “Loss in Excess
    of Policy Limits” which as used herein shall mean an amount that the Company would
    have been contractually liable to pay had it not been for the limit of the original
    Policy as a result of an action against it by its insured; its insured’s assignee
    or a third party claimant. Such loss in excess of the limit shall have been incurred
    because of failure by the Company to settle within the Policy limit; or by reason
    of alleged or actual negligence; fraud; or bad faith in rejecting an offer of
    settlement or in the preparation of the defense or in the trial of any action
    against its insured or in the preparation or prosecution of an appeal consequent
    upon such action.
  sentences:
  - extra contractual obligations
  - loss in excess of policy limits
  - property and casualty insurance
- source_sentence: Whenever the term “Run-Off Reinsurer” is used in this Agreement;
    such term shall mean a Reinsurer that is no longer an “active reinsurance market.”
    A Reinsurer will no longer be an “active reinsurance market” if that Reinsurer
    becomes insolvent; is placed into liquidation or receivership or if the Reinsurer
    ceases all underwriting operations in the United States to the extent it no longer
    accepts new and renewal business.
  sentences:
  - run-off reinsurer
  - terrorism
  - loss in excess of policy limits
- source_sentence: 'A. Applies to reinsurers other than Lloyd’s Underwriters and not
    applicable to reinsurers registered in Canada: 1. This Article shall not be read
    to conflict with or override the obligations of the parties to arbitrate their
    disputes as provided for in the Arbitration Article. This Article is intended
    as an aid to compelling arbitration or enforcing such arbitration or arbitral
    award; not as an alternative to the Arbitration Article for resolving disputes
    arising out of this Contract. 2. In the event of the failure of the Reinsurer
    hereon to pay any amount claimed to be due hereunder; the Reinsurer hereon; at
    the request of the Company; shall submit to the jurisdiction of any court of competent
    jurisdiction within Canada and shall comply with all requirements necessary to
    give such court jurisdiction; and all matters arising hereunder shall be determined
    in accordance with the law and practice of such court. 3. The service of process
    may be made upon Cassels Brock & Blackwell LLP; 2100 Scotia Plaza; 40 King Street
    West; Toronto; Ontario; Canada; M5H 3C2. In any suit instituted against the Reinsurer
    upon this Contract; the Reinsurer shall abide by the final decision of such court
    or of any appellate court in the event of an appeal. 4. The above-named are authorized
    and directed to accept service of process on behalf of the Reinsurer in any such
    suit and/or upon the request of the Company; to give a written undertaking to
    the Company that they shall enter a general appearance upon the Reinsurer’s behalf
    in the event such a suit shall be instituted. 5. Further; pursuant to any statute
    which makes provision therefor; the Reinsurer hereon hereby designates the Superintendent;
    Commissioner or Director of insurance or other officer specified for that purpose
    in the statute or his successor or successors in office as their true and lawful
    attorney upon whom may be served any lawful process in any action; suit or proceeding
    instituted by or on behalf of the Company or any beneficiary hereunder arising
    out of this Contract; and hereby designates the above-named as the person to whom
    the said officer is authorized to mail such process or a true copy thereof.'
  sentences:
  - 'nuclear energy risk exclusion '
  - limited cyber loss exclusion
  - service of suit
- source_sentence: scribing Reinsurer shall be deemed to provide cover; and no Subscribing
    Reinsurer shall be liable to pay any claim or provide any benefit hereunder; to
    the extent that the provision of such cover; payment of such claim or provision
    of such benefit violates any applicable trade or economic sanctions law or regulation
    with which such Subscribing Reinsurer is legally obligated to comply.
  sentences:
  - sanction limitation
  - cyber loss limited exclusion
  - sanction limitation
- source_sentence: 'Article 32 - Governing Law (BRMA 71B)

    This Contract shall be governed by and construed in accordance with the laws of
    the State of California.'
  sentences:
  - governing law
  - service of suit
  - service of suit
---

# SentenceTransformer based on sentence-transformers/paraphrase-multilingual-mpnet-base-v2

This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [sentence-transformers/paraphrase-multilingual-mpnet-base-v2](https://huggingface.co/sentence-transformers/paraphrase-multilingual-mpnet-base-v2) on the cla and def datasets. It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.

## Model Details

### Model Description
- **Model Type:** Sentence Transformer
- **Base model:** [sentence-transformers/paraphrase-multilingual-mpnet-base-v2](https://huggingface.co/sentence-transformers/paraphrase-multilingual-mpnet-base-v2) <!-- at revision 79f2382ceacceacdf38563d7c5d16b9ff8d725d6 -->
- **Maximum Sequence Length:** 128 tokens
- **Output Dimensionality:** 768 dimensions
- **Similarity Function:** Cosine Similarity
- **Training Datasets:**
  - cla
  - def
<!-- - **Language:** Unknown -->
<!-- - **License:** Unknown -->

### Model Sources

- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)

### Full Model Architecture

```
SentenceTransformer(
  (0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: XLMRobertaModel
  (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
)
```
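
For intuition, the Pooling stage above is just a masked mean over the transformer's token embeddings (`pooling_mode_mean_tokens` is the only active mode). The sketch below reproduces that computation with plain `transformers`; it is illustrative only, since `SentenceTransformer.encode` already handles tokenization, batching, and pooling:

```python
import torch
from transformers import AutoModel, AutoTokenizer

repo = "Anakeen/paraphrase-multilingual-mpnet-base-v2_df_meta"
tokenizer = AutoTokenizer.from_pretrained(repo)
encoder = AutoModel.from_pretrained(repo)

batch = tokenizer(["governing law"], padding=True, truncation=True,
                  max_length=128, return_tensors="pt")
with torch.no_grad():
    token_embeddings = encoder(**batch).last_hidden_state  # (batch, seq_len, 768)

# Mean pooling: zero out the padding positions, then average what remains.
mask = batch["attention_mask"].unsqueeze(-1).float()
sentence_embedding = (token_embeddings * mask).sum(dim=1) / mask.sum(dim=1)
print(sentence_embedding.shape)  # torch.Size([1, 768])
```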

## Usage

### Direct Usage (Sentence Transformers)

First install the Sentence Transformers library:

```bash
pip install -U sentence-transformers
```

Then you can load this model and run inference.
```python
from sentence_transformers import SentenceTransformer

# Download from the 🤗 Hub
model = SentenceTransformer("Anakeen/paraphrase-multilingual-mpnet-base-v2_df_meta")
# Run inference
sentences = [
    'Article 32 - Governing Law (BRMA 71B)\nThis Contract shall be governed by and construed in accordance with the laws of the State of California.',
    'governing law',
    'service of suit',
]
embeddings = model.encode(sentences)
print(embeddings.shape)
# [3, 768]

# Get the similarity scores for the embeddings
similarities = model.similarity(embeddings, embeddings)
print(similarities.shape)
# [3, 3]
```
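
Because training paired full clause texts with short clause-type names (see the widget examples in the metadata above), a natural application is ranking candidate clause labels against a clause by cosine similarity. A minimal sketch, with candidate labels borrowed from the examples above:

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("Anakeen/paraphrase-multilingual-mpnet-base-v2_df_meta")

clause = (
    "This Contract shall be governed by and construed in accordance "
    "with the laws of the State of California."
)
labels = ["governing law", "service of suit", "sanction limitation"]

# Rank the candidate labels by cosine similarity to the clause embedding.
scores = model.similarity(model.encode([clause]), model.encode(labels))
print(labels[scores.argmax().item()])  # expected: governing law
```

Note that inputs are truncated at 128 tokens, so very long clauses are scored on their opening text only.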

<!--
### Direct Usage (Transformers)

<details><summary>Click to see the direct usage in Transformers</summary>

</details>
-->

<!--
### Downstream Usage (Sentence Transformers)

You can finetune this model on your own dataset.

<details><summary>Click to expand</summary>

</details>
-->

<!--
### Out-of-Scope Use

*List how the model may foreseeably be misused and address what users ought not to do with the model.*
-->

<!--
## Bias, Risks and Limitations

*What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.*
-->

<!--
### Recommendations

*What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.*
-->
## Training Details

### Training Datasets

#### cla

* Dataset: cla
* Size: 4,832 training samples
* Columns: <code>sentence</code>, <code>refined_name</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence | refined_name | label |
  |:--------|:---------|:-------------|:------|
  | type    | string   | string       | int   |
  | details | <ul><li>min: 14 tokens</li><li>mean: 109.03 tokens</li><li>max: 128 tokens</li></ul> | <ul><li>min: 3 tokens</li><li>mean: 7.38 tokens</li><li>max: 26 tokens</li></ul> | <ul><li>0: ~1.50%</li><li>1: ~0.10%</li><li>2: ~0.20%</li><li>3: ~0.30%</li><li>4: ~0.30%</li><li>5: ~0.70%</li><li>6: ~0.20%</li><li>7: ~0.10%</li><li>8: ~0.10%</li><li>9: ~0.10%</li><li>10: ~0.10%</li><li>11: ~0.10%</li><li>12: ~0.20%</li><li>13: ~0.70%</li><li>14: ~0.20%</li><li>15: ~0.20%</li><li>16: ~0.80%</li><li>17: ~0.40%</li><li>18: ~0.50%</li><li>19: ~0.50%</li><li>20: ~8.50%</li><li>21: ~0.40%</li><li>22: ~0.70%</li><li>23: ~0.30%</li><li>24: ~0.30%</li><li>25: ~0.90%</li><li>26: ~0.10%</li><li>27: ~0.10%</li><li>28: ~0.50%</li><li>29: ~0.30%</li><li>30: ~0.50%</li><li>31: ~0.20%</li><li>32: ~0.40%</li><li>33: ~0.10%</li><li>34: ~0.30%</li><li>35: ~0.10%</li><li>36: ~0.10%</li><li>37: ~0.50%</li><li>38: ~0.70%</li><li>39: ~0.10%</li><li>40: ~0.20%</li><li>41: ~0.20%</li><li>42: ~0.70%</li><li>43: ~0.10%</li><li>44: ~0.60%</li><li>45: ~0.30%</li><li>46: ~0.80%</li><li>47: ~4.10%</li><li>48: ~0.30%</li><li>49: ~0.10%</li><li>50: ~0.40%</li><li>51: ~0.50%</li><li>52: ~0.50%</li><li>53: ~1.60%</li><li>54: ~0.10%</li><li>55: ~0.30%</li><li>56: ~0.20%</li><li>57: ~0.60%</li><li>58: ~0.10%</li><li>59: ~0.10%</li><li>60: ~0.20%</li><li>61: ~0.30%</li><li>62: ~0.30%</li><li>63: ~0.50%</li><li>64: ~0.20%</li><li>65: ~0.30%</li><li>66: ~0.20%</li><li>67: ~0.10%</li><li>68: ~0.30%</li><li>69: ~0.20%</li><li>70: ~0.20%</li><li>71: ~0.10%</li><li>72: ~0.10%</li><li>73: ~3.30%</li><li>74: ~0.70%</li><li>75: ~0.60%</li><li>76: ~0.20%</li><li>77: ~0.40%</li><li>78: ~0.30%</li><li>79: ~3.70%</li><li>80: ~0.50%</li><li>81: ~0.40%</li><li>82: ~0.10%</li><li>83: ~0.20%</li><li>84: ~0.50%</li><li>85: ~1.80%</li><li>86: ~0.30%</li><li>87: ~2.70%</li><li>88: ~0.30%</li><li>89: ~3.00%</li><li>90: ~0.30%</li><li>91: ~1.60%</li><li>92: ~0.10%</li><li>93: ~0.40%</li><li>94: ~0.40%</li><li>95: ~0.10%</li><li>96: ~0.60%</li><li>97: ~0.40%</li><li>98: ~0.10%</li><li>99: ~0.20%</li><li>100: ~0.10%</li><li>101: ~0.30%</li><li>102: ~0.30%</li><li>103: ~0.80%</li><li>104: ~0.10%</li><li>105: ~0.20%</li><li>106: ~0.50%</li><li>107: ~3.90%</li><li>108: ~0.10%</li><li>109: ~0.10%</li><li>110: ~0.20%</li><li>111: ~0.40%</li><li>112: ~0.20%</li><li>113: ~0.20%</li><li>114: ~0.10%</li><li>115: ~0.20%</li><li>116: ~0.10%</li><li>117: ~0.20%</li><li>118: ~0.40%</li><li>119: ~0.10%</li><li>120: ~0.10%</li><li>121: ~0.10%</li><li>122: ~0.10%</li><li>123: ~0.10%</li><li>124: ~0.20%</li><li>125: ~0.10%</li><li>126: ~0.10%</li><li>127: ~0.10%</li><li>128: ~0.30%</li><li>129: ~0.30%</li><li>130: ~1.20%</li><li>131: ~0.10%</li><li>132: ~0.20%</li><li>133: ~0.10%</li><li>134: ~0.20%</li><li>135: ~2.00%</li><li>136: ~0.30%</li><li>137: ~0.50%</li><li>138: ~0.40%</li><li>139: ~0.70%</li><li>140: ~0.10%</li><li>141: ~0.30%</li><li>142: ~1.00%</li><li>143: ~0.30%</li><li>144: ~0.60%</li><li>145: ~0.10%</li><li>146: ~0.30%</li><li>147: ~0.10%</li><li>148: ~0.10%</li><li>149: ~0.20%</li><li>150: ~0.80%</li><li>151: ~0.10%</li><li>152: ~0.30%</li><li>153: ~0.10%</li><li>154: ~0.20%</li><li>155: ~0.10%</li><li>156: ~0.10%</li><li>157: ~0.10%</li><li>158: ~0.10%</li><li>159: ~0.30%</li><li>160: ~0.20%</li><li>161: ~3.80%</li><li>162: ~0.10%</li><li>163: ~0.10%</li><li>164: ~0.10%</li><li>165: ~0.20%</li><li>166: ~0.20%</li><li>167: ~0.10%</li><li>168: ~0.20%</li><li>169: ~0.20%</li><li>170: ~0.20%</li><li>171: ~0.50%</li><li>172: ~0.10%</li><li>173: ~0.10%</li><li>174: ~0.30%</li><li>175: ~0.90%</li><li>176: ~0.80%</li><li>177: ~0.50%</li><li>178: ~0.40%</li><li>179: ~0.30%</li><li>180: ~0.30%</li><li>181: ~0.20%</li><li>182: ~0.10%</li><li>183: ~0.10%</li><li>184: ~0.10%</li><li>185: ~0.10%</li><li>186: ~0.10%</li><li>187: ~1.00%</li><li>188: ~0.10%</li><li>189: ~0.20%</li><li>190: ~0.40%</li><li>191: ~0.10%</li><li>192: ~0.20%</li><li>193: ~0.10%</li><li>194: ~0.10%</li><li>195: ~0.50%</li><li>196: ~0.10%</li><li>197: ~0.50%</li><li>198: ~0.10%</li><li>199: ~0.20%</li><li>200: ~0.20%</li><li>201: ~0.10%</li><li>202: ~0.10%</li><li>203: ~0.20%</li><li>204: ~0.10%</li><li>205: ~0.10%</li><li>206: ~0.10%</li><li>207: ~0.10%</li><li>208: ~1.10%</li><li>209: ~0.10%</li><li>210: ~0.20%</li><li>211: ~0.10%</li><li>212: ~0.10%</li><li>213: ~0.10%</li><li>214: ~0.10%</li><li>215: ~0.10%</li><li>216: ~0.30%</li><li>217: ~0.30%</li><li>218: ~0.10%</li><li>219: ~0.10%</li><li>220: ~0.10%</li><li>221: ~0.10%</li><li>222: ~0.10%</li><li>223: ~0.10%</li><li>224: ~0.10%</li><li>225: ~0.20%</li><li>226: ~0.10%</li><li>227: ~0.10%</li><li>228: ~0.10%</li></ul> |
* Samples:
  | sentence | refined_name | label |
  |:---------|:-------------|:------|
  | <code>1. This Contract does not cover any loss or liability accruing to the Reassured; directly or indirectly and whether as Insurer or Reinsurer; from any Pool of Insurers or Reinsurers formed for the purpose of covering Atomic or Nuclear Energy risks.<br>2. Without in any way restricting the operation of paragraph (1) of this Clause; this Contract does not cover any loss or liability accruing to the Reassured; directly or indirectly and whether as Insurer or Reinsurer; from any insurance against Physical Damage (including business interruption or consequential loss arising out of such Physical Damage) to:<br>I. Nuclear reactor power plants including all auxiliary property on the site; or<br>II. Any other nuclear reactor installation; including laboratories handling radioactive materials in connection with reactor installations; and “critical facilities” as such; or<br>III. Installations for fabricating complete fuel elements or for processing substantial quantities of “special nuclear material;” and for reprocessing; salvaging; chemically separating; storing or disposing of “spent” nuclear fuel or waste materials; or<br>IV. Installations other than those listed in paragraph (2) III above using substantial quantities of radioactive isotopes or other products of nuclear fission.<br>3. Without in any way restricting the operations of paragraphs (1) and (2) hereof; this Contract does not cover any loss or liability by radioactive contamination accruing to the Reassured; directly or indirectly; and whether as Insurer or Reinsurer; from any insurance on property which is on the same site as a nuclear reactor power plant or other nuclear installation and which normally would be insured therewith except that this paragraph (3) shall not operate<br>(a) where the Reassured does not have knowledge of such nuclear reactor power plant or nuclear installation; or<br>(b) where said insurance contains a provision excluding coverage for damage to property caused by or resulting from radioactive contamination; however caused. However on and after 1st January 1960 this sub-paragraph (b) shall only apply provided the said radioactive contamination exclusion provision has been approved by the Governmental Authority having jurisdiction thereof.<br>4. Without in any way restricting the operations of paragraphs (1); (2) and (3) hereof; this Contract does not cover any loss or liability by radioactive contamination accruing to the Reassured; directly or indirectly; and whether as Insurer or Reinsurer; when such radioactive contamination is a named hazard specifically insured against.<br>5. It is understood and agreed that this Clause shall not extend to risks using radioactive isotopes in any form where the nuclear exposure is not considered by the Reassured to be the primary hazard.<br>6. The term “special nuclear material” shall have the meaning given it in the Atomic Energy Act of 1954 or by any law amendatory thereof.<br>7. The Reassured to be sole judge of what constitutes:<br>(a) substantial quantities; and<br>(b) the extent of installation; plant or site.<br>Note. Without in any way restricting the operation of paragraph (1) hereof; it is understood and agreed that<br>(a) all policies issued by the Reassured on or before 31st December 1957 shall be free from the application of the other provisions of this Clause until expiry date or 31st December 1960 whichever first occurs whereupon all the provisions of this Clause shall apply;<br>(b) with respect to any risk located in Canada policies issued by the Reassured on or before 31st December 1958 shall be free from the application of the other provisions of this Clause until expiry date or 31st December 1960 whichever first occurs whereupon all the provisions of this Clause shall apply.</code> | <code>nuclear incident exclusion physical damage reinsurance u.s.a.</code> | <code>0</code> |
  | <code>Downgrading clause ~ ABR1001 (Amended)<br><br>Reinsurer with an S&P Rating<br>Unless otherwise agreed by the Reinsured; the Reinsurer shall at all times during the Period of this Contract maintain an Insurer Financial Strength (IFS) rating from Standard & Poor's Rating Group of 55 Water Street; New York; NY 10041; USA ("S&P") equal to or greater than a rating of A minus as applied by S&P to that Reinsurer.</code> | <code>termination and downgrading </code> | <code>1</code> |
  | <code>Dispute Resolution ~ ABR1004<br>Where any dispute or difference between the parties arising out of or in connection with this Contract; including formation and validity and whether arising during or after the period of this Contract; has not been settled through negotiation; both parties agree to try in good faith to settle such dispute by non- binding mediation; before resorting to arbitration in the manner set out below.</code> | <code>dispute resolution</code> | <code>2</code> |
* Loss: [<code>BatchAllTripletLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#batchalltripletloss)

#### def

* Dataset: def
* Size: 6,333 training samples
* Columns: <code>sentence</code>, <code>refined_name</code>, and <code>label</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence | refined_name | label |
  |:--------|:---------|:-------------|:------|
  | type    | string   | string       | int   |
  | details | <ul><li>min: 3 tokens</li><li>mean: 78.14 tokens</li><li>max: 128 tokens</li></ul> | <ul><li>min: 3 tokens</li><li>mean: 5.58 tokens</li><li>max: 16 tokens</li></ul> | <ul><li>0: ~0.10%</li><li>1: ~1.40%</li><li>2: ~0.40%</li><li>3: ~3.60%</li><li>4: ~0.50%</li><li>5: ~0.50%</li><li>6: ~0.50%</li><li>7: ~0.60%</li><li>8: ~2.10%</li><li>9: ~0.30%</li><li>10: ~0.10%</li><li>11: ~0.10%</li><li>12: ~0.10%</li><li>13: ~0.10%</li><li>14: ~0.10%</li><li>15: ~0.20%</li><li>16: ~0.10%</li><li>17: ~0.10%</li><li>18: ~0.10%</li><li>19: ~1.70%</li><li>20: ~0.40%</li><li>21: ~0.70%</li><li>22: ~0.10%</li><li>23: ~0.20%</li><li>24: ~0.40%</li><li>25: ~0.30%</li><li>26: ~0.40%</li><li>27: ~0.10%</li><li>28: ~0.10%</li><li>29: ~1.30%</li><li>30: ~0.10%</li><li>31: ~0.20%</li><li>32: ~3.80%</li><li>33: ~1.90%</li><li>34: ~0.80%</li><li>35: ~0.10%</li><li>36: ~2.40%</li><li>37: ~0.10%</li><li>38: ~0.10%</li><li>39: ~1.60%</li><li>40: ~0.10%</li><li>41: ~0.10%</li><li>42: ~0.10%</li><li>43: ~0.30%</li><li>44: ~0.10%</li><li>45: ~0.10%</li><li>46: ~0.20%</li><li>47: ~0.10%</li><li>48: ~0.30%</li><li>49: ~0.10%</li><li>50: ~0.10%</li><li>51: ~0.10%</li><li>52: ~0.10%</li><li>53: ~0.10%</li><li>54: ~5.60%</li><li>55: ~0.20%</li><li>56: ~0.10%</li><li>57: ~0.10%</li><li>58: ~0.30%</li><li>59: ~0.10%</li><li>60: ~0.40%</li><li>61: ~0.50%</li><li>62: ~1.30%</li><li>63: ~1.40%</li><li>64: ~0.50%</li><li>65: ~0.10%</li><li>66: ~0.80%</li><li>67: ~0.10%</li><li>68: ~0.60%</li><li>69: ~1.10%</li><li>70: ~0.20%</li><li>71: ~0.20%</li><li>72: ~0.10%</li><li>73: ~0.20%</li><li>74: ~1.30%</li><li>75: ~0.20%</li><li>76: ~0.10%</li><li>77: ~0.10%</li><li>78: ~0.50%</li><li>79: ~0.30%</li><li>80: ~0.40%</li><li>81: ~0.20%</li><li>82: ~0.40%</li><li>83: ~0.50%</li><li>84: ~1.70%</li><li>85: ~0.50%</li><li>86: ~0.10%</li><li>87: ~0.20%</li><li>88: ~0.90%</li><li>89: ~0.60%</li><li>90: ~0.10%</li><li>91: ~0.50%</li><li>92: ~0.10%</li><li>93: ~0.20%</li><li>94: ~0.10%</li><li>95: ~0.20%</li><li>96: ~0.10%</li><li>97: ~0.10%</li><li>98: ~0.20%</li><li>99: ~0.10%</li><li>100: ~0.10%</li><li>101: ~1.10%</li><li>102: ~0.20%</li><li>103: ~0.10%</li><li>104: ~0.50%</li><li>105: ~0.10%</li><li>106: ~0.10%</li><li>107: ~0.10%</li><li>108: ~0.70%</li><li>109: ~0.50%</li><li>110: ~0.20%</li><li>111: ~0.10%</li><li>112: ~0.20%</li><li>113: ~0.20%</li><li>114: ~0.10%</li><li>115: ~0.20%</li><li>116: ~0.20%</li><li>117: ~0.30%</li><li>118: ~0.20%</li><li>119: ~0.20%</li><li>120: ~0.50%</li><li>121: ~0.20%</li><li>122: ~0.10%</li><li>123: ~0.10%</li><li>124: ~0.30%</li><li>125: ~0.10%</li><li>126: ~0.20%</li><li>127: ~0.20%</li><li>128: ~0.30%</li><li>129: ~0.40%</li><li>130: ~0.30%</li><li>131: ~0.10%</li><li>132: ~0.30%</li><li>133: ~0.10%</li><li>134: ~0.20%</li><li>135: ~0.20%</li><li>136: ~0.20%</li><li>137: ~0.10%</li><li>138: ~0.40%</li><li>139: ~0.10%</li><li>140: ~0.10%</li><li>141: ~0.20%</li><li>142: ~0.50%</li><li>143: ~0.70%</li><li>144: ~0.10%</li><li>145: ~0.10%</li><li>146: ~0.20%</li><li>147: ~0.10%</li><li>148: ~0.10%</li><li>149: ~0.20%</li><li>150: ~0.20%</li><li>151: ~0.40%</li><li>152: ~0.10%</li><li>153: ~0.20%</li><li>154: ~0.10%</li><li>155: ~0.20%</li><li>156: ~0.10%</li><li>157: ~0.10%</li><li>158: ~0.10%</li><li>159: ~0.10%</li><li>160: ~0.20%</li><li>161: ~0.10%</li><li>162: ~0.10%</li><li>163: ~0.20%</li><li>164: ~0.10%</li><li>165: ~0.10%</li><li>166: ~0.20%</li><li>167: ~0.40%</li><li>168: ~0.20%</li><li>169: ~0.20%</li><li>170: ~0.10%</li><li>171: ~0.10%</li><li>172: ~0.20%</li><li>173: ~0.10%</li><li>174: ~0.10%</li><li>175: ~0.10%</li><li>176: ~0.10%</li><li>177: ~0.10%</li><li>178: ~0.20%</li><li>179: ~0.20%</li><li>180: ~0.30%</li><li>181: ~0.20%</li><li>182: ~0.10%</li><li>183: ~1.10%</li><li>184: ~0.10%</li><li>185: ~0.30%</li><li>186: ~0.10%</li><li>187: ~0.10%</li><li>188: ~0.10%</li><li>189: ~0.10%</li><li>190: ~0.10%</li><li>191: ~0.10%</li><li>192: ~0.10%</li><li>193: ~0.10%</li><li>194: ~0.20%</li><li>195: ~0.10%</li><li>196: ~0.10%</li><li>197: ~0.10%</li><li>198: ~0.10%</li><li>199: ~0.10%</li><li>200: ~0.10%</li><li>201: ~0.30%</li><li>202: ~0.10%</li><li>203: ~1.00%</li><li>204: ~0.20%</li><li>205: ~0.10%</li><li>206: ~0.10%</li><li>207: ~0.10%</li><li>208: ~0.10%</li><li>209: ~0.10%</li><li>210: ~0.10%</li><li>211: ~0.10%</li><li>212: ~0.10%</li><li>213: ~0.10%</li><li>214: ~0.10%</li><li>215: ~0.10%</li><li>216: ~0.10%</li><li>217: ~0.10%</li><li>218: ~0.10%</li><li>219: ~0.10%</li><li>220: ~0.10%</li><li>221: ~0.10%</li><li>222: ~0.10%</li><li>223: ~0.10%</li><li>224: ~0.10%</li><li>225: ~0.10%</li><li>226: ~0.10%</li><li>227: ~0.10%</li><li>228: ~0.10%</li><li>229: ~0.10%</li><li>230: ~0.10%</li><li>231: ~0.10%</li><li>232: ~0.10%</li><li>233: ~0.10%</li><li>234: ~0.20%</li><li>235: ~0.10%</li><li>236: ~0.10%</li><li>237: ~0.10%</li><li>238: ~0.10%</li><li>239: ~0.10%</li><li>240: ~0.30%</li><li>241: ~0.20%</li><li>242: ~0.10%</li><li>243: ~0.90%</li><li>244: ~0.60%</li><li>245: ~0.10%</li><li>246: ~0.70%</li><li>247: ~0.10%</li><li>248: ~0.40%</li><li>249: ~0.20%</li><li>250: ~0.10%</li><li>251: ~0.10%</li><li>252: ~0.10%</li><li>253: ~0.10%</li><li>254: ~0.20%</li><li>255: ~0.10%</li><li>256: ~0.20%</li><li>257: ~0.10%</li><li>258: ~0.10%</li><li>259: ~0.20%</li><li>260: ~0.10%</li><li>261: ~0.10%</li><li>262: ~0.20%</li><li>263: ~0.10%</li><li>264: ~0.20%</li><li>265: ~0.20%</li><li>266: ~0.10%</li><li>267: ~0.10%</li><li>268: ~0.10%</li><li>269: ~0.10%</li><li>270: ~0.10%</li><li>271: ~0.10%</li><li>272: ~0.10%</li><li>273: ~0.20%</li><li>274: ~0.10%</li><li>275: ~0.10%</li><li>276: ~0.10%</li><li>277: ~0.10%</li><li>278: ~0.10%</li><li>279: ~0.10%</li><li>280: ~0.10%</li><li>281: ~0.10%</li><li>282: ~0.10%</li><li>283: ~0.10%</li><li>284: ~0.10%</li><li>285: ~0.10%</li><li>286: ~0.10%</li><li>287: ~0.20%</li><li>288: ~0.10%</li><li>289: ~0.10%</li><li>290: ~0.20%</li><li>291: ~0.10%</li><li>292: ~0.20%</li><li>293: ~0.20%</li><li>294: ~0.10%</li><li>295: ~0.10%</li><li>296: ~0.10%</li><li>297: ~0.10%</li><li>298: ~0.10%</li><li>299: ~0.10%</li><li>300: ~0.10%</li><li>301: ~0.10%</li><li>302: ~0.10%</li><li>303: ~0.10%</li><li>304: ~0.10%</li><li>305: ~0.10%</li><li>306: ~0.10%</li><li>307: ~0.20%</li><li>308: ~0.20%</li><li>309: ~0.10%</li><li>310: ~0.10%</li><li>311: ~0.10%</li><li>312: ~0.20%</li><li>313: ~0.10%</li><li>314: ~0.20%</li><li>315: ~0.10%</li><li>316: ~0.10%</li><li>317: ~0.10%</li><li>318: ~0.10%</li><li>319: ~0.10%</li><li>320: ~0.10%</li><li>321: ~0.10%</li><li>322: ~0.10%</li><li>323: ~0.20%</li><li>324: ~0.10%</li><li>325: ~0.10%</li><li>326: ~0.10%</li><li>327: ~0.10%</li><li>328: ~0.10%</li><li>329: ~0.10%</li><li>330: ~0.20%</li><li>331: ~0.10%</li><li>332: ~0.30%</li><li>333: ~0.10%</li><li>334: ~0.10%</li><li>335: ~0.10%</li><li>336: ~0.10%</li><li>337: ~0.10%</li><li>338: ~0.10%</li><li>339: ~0.10%</li><li>340: ~0.10%</li><li>341: ~0.10%</li><li>342: ~0.10%</li><li>343: ~0.10%</li><li>344: ~0.10%</li><li>345: ~0.10%</li><li>346: ~0.10%</li><li>347: ~0.10%</li><li>348: ~0.10%</li><li>349: ~0.10%</li><li>350: ~0.10%</li><li>351: ~0.10%</li><li>352: ~0.10%</li><li>353: ~0.10%</li><li>354: ~0.10%</li><li>355: ~0.10%</li><li>356: ~0.10%</li><li>357: ~0.10%</li><li>358: ~0.20%</li><li>359: ~0.10%</li><li>360: ~0.10%</li><li>361: ~0.10%</li><li>362: ~0.10%</li><li>363: ~0.10%</li><li>364: ~0.10%</li><li>365: ~0.10%</li><li>366: ~0.20%</li><li>367: ~0.10%</li><li>368: ~0.20%</li><li>369: ~0.20%</li><li>370: ~0.20%</li><li>371: ~0.20%</li><li>372: ~0.20%</li><li>373: ~0.10%</li><li>374: ~0.10%</li><li>375: ~0.10%</li><li>376: ~0.10%</li><li>377: ~0.10%</li><li>378: ~0.10%</li><li>379: ~0.20%</li><li>380: ~0.10%</li><li>381: ~0.10%</li><li>382: ~0.10%</li><li>383: ~0.20%</li><li>384: ~0.10%</li><li>385: ~0.10%</li><li>386: ~0.30%</li><li>387: ~0.10%</li><li>388: ~0.10%</li><li>389: ~0.20%</li><li>390: ~0.10%</li><li>391: ~0.10%</li><li>392: ~0.10%</li></ul> |
* Samples:
  | sentence | refined_name | label |
  |:---------|:-------------|:------|
  | <code>“North American CAT Perils” means certain Named Storms and Earthquake; each as defined below; in respect of that portion of losses which occur in the United States and Canada and their possessions and territories; excluding the Territory of Guam; the Territory of American Samoa; the Commonwealth of the Northern Mariana Islands; Wake Island; Johnston Atoll; Palmyra Atoll; and the State of Hawaiiterritory of Guam.</code> | <code>north american cat perils</code> | <code>0</code> |
  | <code>For the purposes of this Paragraph A.; “Named Storm” means any windstorm or windstorm system that has been named by a Reporting Agency at any time in its lifecycle and ensuing losses therefrom.</code> | <code>named storm</code> | <code>1</code> |
  | <code>For the purposes of this Paragraph A.; “Earthquake” means earthquake shake and ensuing losses therefrom.</code> | <code>earthquake</code> | <code>2</code> |
* Loss: [<code>BatchAllTripletLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#batchalltripletloss)
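
`BatchAllTripletLoss` builds every valid (anchor, positive, negative) triplet within a batch, where positives share the anchor's label and negatives do not, and averages the hinge loss `max(0, d(a, p) - d(a, n) + margin)` over the triplets that still violate the margin. A rough self-contained sketch of the objective, assuming the library defaults of Euclidean distance and margin 5 (the real implementation is `sentence_transformers.losses.BatchAllTripletLoss`):

```python
import torch

def batch_all_triplet_loss(embeddings: torch.Tensor, labels: torch.Tensor,
                           margin: float = 5.0) -> torch.Tensor:
    dist = torch.cdist(embeddings, embeddings)          # dist[a, b] = ||e_a - e_b||
    same = labels.unsqueeze(0) == labels.unsqueeze(1)   # same-label mask
    eye = torch.eye(len(labels), dtype=torch.bool)
    anchor_pos = same & ~eye                            # valid (anchor, positive) pairs
    anchor_neg = ~same                                  # valid (anchor, negative) pairs

    # triplet[a, p, n] = d(a, p) - d(a, n) + margin for every index combination
    triplet = dist.unsqueeze(2) - dist.unsqueeze(1) + margin
    valid = anchor_pos.unsqueeze(2) & anchor_neg.unsqueeze(1)
    losses = torch.relu(triplet[valid])
    active = losses > 0
    return losses[active].mean() if active.any() else losses.sum()
```

With batches of 16 drawn from hundreds of classes, many batches contain few same-label pairs, so the number of active triplets per step can be small; that is expected for this loss on long-tailed label sets.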

### Training Hyperparameters
#### Non-Default Hyperparameters

- `per_device_train_batch_size`: 16
- `per_device_eval_batch_size`: 16
- `num_train_epochs`: 6
- `warmup_ratio`: 0.1

#### All Hyperparameters
<details><summary>Click to expand</summary>

- `overwrite_output_dir`: False
- `do_predict`: False
- `eval_strategy`: no
- `prediction_loss_only`: True
- `per_device_train_batch_size`: 16
- `per_device_eval_batch_size`: 16
- `per_gpu_train_batch_size`: None
- `per_gpu_eval_batch_size`: None
- `gradient_accumulation_steps`: 1
- `eval_accumulation_steps`: None
- `learning_rate`: 5e-05
- `weight_decay`: 0.0
- `adam_beta1`: 0.9
- `adam_beta2`: 0.999
- `adam_epsilon`: 1e-08
- `max_grad_norm`: 1.0
- `num_train_epochs`: 6
- `max_steps`: -1
- `lr_scheduler_type`: linear
- `lr_scheduler_kwargs`: {}
- `warmup_ratio`: 0.1
- `warmup_steps`: 0
- `log_level`: passive
- `log_level_replica`: warning
- `log_on_each_node`: True
- `logging_nan_inf_filter`: True
- `save_safetensors`: True
- `save_on_each_node`: False
- `save_only_model`: False
- `restore_callback_states_from_checkpoint`: False
- `no_cuda`: False
- `use_cpu`: False
- `use_mps_device`: False
- `seed`: 42
- `data_seed`: None
- `jit_mode_eval`: False
- `use_ipex`: False
- `bf16`: False
- `fp16`: False
- `fp16_opt_level`: O1
- `half_precision_backend`: auto
- `bf16_full_eval`: False
- `fp16_full_eval`: False
- `tf32`: None
- `local_rank`: 0
- `ddp_backend`: None
- `tpu_num_cores`: None
- `tpu_metrics_debug`: False
- `debug`: []
- `dataloader_drop_last`: False
- `dataloader_num_workers`: 0
- `dataloader_prefetch_factor`: None
- `past_index`: -1
- `disable_tqdm`: False
- `remove_unused_columns`: True
- `label_names`: None
- `load_best_model_at_end`: False
- `ignore_data_skip`: False
- `fsdp`: []
- `fsdp_min_num_params`: 0
- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
- `fsdp_transformer_layer_cls_to_wrap`: None
- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
- `deepspeed`: None
- `label_smoothing_factor`: 0.0
- `optim`: adamw_torch
- `optim_args`: None
- `adafactor`: False
- `group_by_length`: False
- `length_column_name`: length
- `ddp_find_unused_parameters`: None
- `ddp_bucket_cap_mb`: None
- `ddp_broadcast_buffers`: False
- `dataloader_pin_memory`: True
- `dataloader_persistent_workers`: False
- `skip_memory_metrics`: True
- `use_legacy_prediction_loop`: False
- `push_to_hub`: False
- `resume_from_checkpoint`: None
- `hub_model_id`: None
- `hub_strategy`: every_save
- `hub_private_repo`: False
- `hub_always_push`: False
- `gradient_checkpointing`: False
- `gradient_checkpointing_kwargs`: None
- `include_inputs_for_metrics`: False
- `eval_do_concat_batches`: True
- `fp16_backend`: auto
- `push_to_hub_model_id`: None
- `push_to_hub_organization`: None
- `mp_parameters`: 
- `auto_find_batch_size`: False
- `full_determinism`: False
- `torchdynamo`: None
- `ray_scope`: last
- `ddp_timeout`: 1800
- `torch_compile`: False
- `torch_compile_backend`: None
- `torch_compile_mode`: None
- `dispatch_batches`: None
- `split_batches`: None
- `include_tokens_per_second`: False
- `include_num_input_tokens_seen`: False
- `neftune_noise_alpha`: None
- `optim_target_modules`: None
- `batch_eval_metrics`: False
- `batch_sampler`: batch_sampler
- `multi_dataset_batch_sampler`: proportional

</details>
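
Taken together, the hyperparameters above correspond to a training run along the following lines. This is a hedged reconstruction rather than the original script: the dataset construction is a placeholder (the real `cla` and `def` datasets pair a clause text with an integer class label), and the loss is instantiated with library defaults:

```python
from datasets import Dataset
from sentence_transformers import (SentenceTransformer, SentenceTransformerTrainer,
                                   SentenceTransformerTrainingArguments)
from sentence_transformers.losses import BatchAllTripletLoss

model = SentenceTransformer("sentence-transformers/paraphrase-multilingual-mpnet-base-v2")

# Placeholders for the real 4,832-row 'cla' and 6,333-row 'def' datasets.
cla = Dataset.from_dict({"sentence": ["..."], "label": [0]})
def_ = Dataset.from_dict({"sentence": ["..."], "label": [0]})

loss = BatchAllTripletLoss(model)

args = SentenceTransformerTrainingArguments(
    output_dir="output",
    num_train_epochs=6,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=16,
    warmup_ratio=0.1,
    # everything else left at the defaults listed above
)

trainer = SentenceTransformerTrainer(
    model=model,
    args=args,
    train_dataset={"cla": cla, "def": def_},
    loss={"cla": loss, "def": loss},
)
trainer.train()
```

The `multi_dataset_batch_sampler: proportional` setting means batches are drawn from `cla` and `def` in proportion to their sizes.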

### Training Logs
| Epoch | Step | Training Loss |
|:------:|:----:|:-------------:|
| 0.7163 | 500 | 1.2076 |
| 1.4327 | 1000 | 1.3144 |
| 2.1490 | 1500 | 1.1513 |
| 2.8653 | 2000 | 0.8245 |
| 3.5817 | 2500 | 0.6458 |
| 4.2980 | 3000 | 0.4437 |
| 5.0143 | 3500 | 0.2403 |
| 5.7307 | 4000 | 0.1507 |


### Framework Versions
- Python: 3.10.12
- Sentence Transformers: 3.0.1
- Transformers: 4.41.2
- PyTorch: 2.3.0+cu121
- Accelerate: 0.31.0
- Datasets: 2.20.0
- Tokenizers: 0.19.1

## Citation

### BibTeX

#### Sentence Transformers
```bibtex
@inproceedings{reimers-2019-sentence-bert,
    title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
    author = "Reimers, Nils and Gurevych, Iryna",
    booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
    month = "11",
    year = "2019",
    publisher = "Association for Computational Linguistics",
    url = "https://arxiv.org/abs/1908.10084",
}
```

#### BatchAllTripletLoss
```bibtex
@misc{hermans2017defense,
    title={In Defense of the Triplet Loss for Person Re-Identification},
    author={Alexander Hermans and Lucas Beyer and Bastian Leibe},
    year={2017},
    eprint={1703.07737},
    archivePrefix={arXiv},
    primaryClass={cs.CV}
}
```

<!--
## Glossary

*Clearly define terms in order to be accessible across audiences.*
-->

<!--
## Model Card Authors

*Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.*
-->

<!--
## Model Card Contact

*Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.*
-->
config.json ADDED
@@ -0,0 +1,29 @@
{
  "_name_or_path": "sentence-transformers/paraphrase-multilingual-mpnet-base-v2",
  "architectures": [
    "XLMRobertaModel"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "xlm-roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "output_past": true,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "torch_dtype": "float32",
  "transformers_version": "4.41.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 250002
}
config_sentence_transformers.json ADDED
@@ -0,0 +1,10 @@
{
  "__version__": {
    "sentence_transformers": "3.0.1",
    "transformers": "4.41.2",
    "pytorch": "2.3.0+cu121"
  },
  "prompts": {},
  "default_prompt_name": null,
  "similarity_fn_name": null
}
model.safetensors ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:451a9b5e947ef58c0f7e9f5640b51a13a429178aedc95f57a164f9e9b594f37c
size 1112197096
modules.json ADDED
@@ -0,0 +1,14 @@
[
  {
    "idx": 0,
    "name": "0",
    "path": "",
    "type": "sentence_transformers.models.Transformer"
  },
  {
    "idx": 1,
    "name": "1",
    "path": "1_Pooling",
    "type": "sentence_transformers.models.Pooling"
  }
]
sentence_bert_config.json ADDED
@@ -0,0 +1,4 @@
{
  "max_seq_length": 128,
  "do_lower_case": false
}
sentencepiece.bpe.model ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cfc8146abe2a0488e9e2a0c56de7952f7c11ab059eca145a0a727afce0db2865
size 5069051
special_tokens_map.json ADDED
@@ -0,0 +1,51 @@
{
  "bos_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "cls_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "mask_token": {
    "content": "<mask>",
    "lstrip": true,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<pad>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "sep_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer.json ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cad551d5600a84242d0973327029452a1e3672ba6313c2a3c3d69c4310e12719
size 17082987
tokenizer_config.json ADDED
@@ -0,0 +1,61 @@
{
  "added_tokens_decoder": {
    "0": {
      "content": "<s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<pad>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "3": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "250001": {
      "content": "<mask>",
      "lstrip": true,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": true,
  "cls_token": "<s>",
  "eos_token": "</s>",
  "mask_token": "<mask>",
  "max_length": 128,
  "model_max_length": 128,
  "pad_to_multiple_of": null,
  "pad_token": "<pad>",
  "pad_token_type_id": 0,
  "padding_side": "right",
  "sep_token": "</s>",
  "stride": 0,
  "tokenizer_class": "XLMRobertaTokenizer",
  "truncation_side": "right",
  "truncation_strategy": "longest_first",
  "unk_token": "<unk>"
}