antoinelouis committed
Commit dcdc1e3
1 Parent(s): 7fccd3c
added_tokens.json ADDED
@@ -0,0 +1,3 @@
+{
+  "<unk>NOTUSED": 32005
+}
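The new file registers a single extra vocabulary entry. As a quick sanity check, a minimal sketch assuming the repo id antoinelouis/crossencoder-camembert-base-mmarcoFR (inferred from the committer name and the model name in the deleted scores file; it is not stated in this diff):

from transformers import AutoTokenizer

# Assumed repo id; substitute the actual checkpoint path if different.
tok = AutoTokenizer.from_pretrained("antoinelouis/crossencoder-camembert-base-mmarcoFR")
assert tok.convert_tokens_to_ids("<unk>NOTUSED") == 32005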
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "camembert-base",
+  "_name_or_path": "almanach/camembert-base",
   "architectures": [
     "CamembertForSequenceClassification"
   ],
@@ -27,7 +27,7 @@
   "pad_token_id": 1,
   "position_embedding_type": "absolute",
   "torch_dtype": "float32",
-  "transformers_version": "4.28.1",
+  "transformers_version": "4.36.2",
   "type_vocab_size": 1,
   "use_cache": true,
   "vocab_size": 32005
dev_scores.csv DELETED
@@ -1,2 +0,0 @@
-r-precision,mrr@10,recall@10,recall@20,recall@50,recall@100,model
-35.65,50.44,82.95,91.50,96.80,98.80,crossencoder-camembert-base-mmarcoFR
mmarco_smallldev_scores.csv ADDED
@@ -0,0 +1,6 @@
+epoch,steps,cutoff,mrr,recall,r-precision
+0,20000,5,0.3195487106017192,0.4914517669531996,0.21380133715377267
+0,20000,10,0.33395489380088234,0.5982808022922637,0.21380133715377267
+0,20000,20,0.340528721477873,0.6925382043935052,0.21380133715377267
+0,20000,50,0.3438538366740518,0.7952960840496658,0.21380133715377267
+0,20000,100,0.3446662186199474,0.8534383954154727,0.21380133715377267
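The replacement file reports metrics at cutoffs 5 through 100 after 20,000 steps of epoch 0; r-precision does not depend on the cutoff, which is why the last column repeats. For reference, a minimal sketch of these metrics, assuming a single relevant passage per query (as is typical for the MS MARCO dev set):

# ranks: 1-based rank of the relevant passage per query, None if not retrieved
def mrr_at_k(ranks, k):
    return sum(1.0 / r for r in ranks if r is not None and r <= k) / len(ranks)

def recall_at_k(ranks, k):
    return sum(1 for r in ranks if r is not None and r <= k) / len(ranks)

def r_precision(ranks):
    # with one relevant passage, this is the share of queries ranked first
    return sum(1 for r in ranks if r == 1) / len(ranks)

ranks = [1, 3, None, 12]       # toy example
print(mrr_at_k(ranks, 10))     # ~0.333
print(recall_at_k(ranks, 10))  # 0.5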
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:37f931066b603cc10af0d660c2e498dc573c1fb3bebdc109d10bb5a258c2136c
-size 442519228
+oid sha256:0a991fc6f0bf126014aecbe313963ea515be39aabb2d8cf10d5ff6927ab3b9a4
+size 442515028
pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:324dc3236fb67814bab4d2faccf7293865fa64333c0347a285e812d913c70f34
-size 442564277
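Together these two changes drop the legacy pickle checkpoint: the weights now ship only as model.safetensors, re-exported with a new hash and a marginally smaller size. A minimal loading sketch, assuming a local copy of the file:

from safetensors.torch import load_file

# Assumes the file was downloaded locally; safetensors loads tensors
# without executing any pickled code.
state_dict = load_file("model.safetensors")
print(f"{len(state_dict)} tensors loaded")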
special_tokens_map.json CHANGED
@@ -1,7 +1,8 @@
 {
   "additional_special_tokens": [
     "<s>NOTUSED",
-    "</s>NOTUSED"
+    "</s>NOTUSED",
+    "<unk>NOTUSED"
   ],
   "bos_token": "<s>",
   "cls_token": "<s>",
tokenizer.json CHANGED
@@ -2,7 +2,7 @@
   "version": "1.0",
   "truncation": {
     "direction": "Right",
-    "max_length": 512,
+    "max_length": 256,
     "strategy": "LongestFirst",
     "stride": 0
   },
@@ -77,6 +77,15 @@
       "rstrip": false,
       "normalized": false,
       "special": true
+    },
+    {
+      "id": 32005,
+      "content": "<unk>NOTUSED",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
     }
   ],
   "normalizer": {
@@ -92,7 +101,8 @@
     {
       "type": "Metaspace",
       "replacement": "▁",
-      "add_prefix_space": true
+      "add_prefix_space": true,
+      "prepend_scheme": "always"
     }
   ]
 },
@@ -180,7 +190,8 @@
   "decoder": {
     "type": "Metaspace",
     "replacement": "▁",
-    "add_prefix_space": true
+    "add_prefix_space": true,
+    "prepend_scheme": "always"
   },
   "model": {
     "type": "Unigram",
@@ -128206,6 +128217,7 @@
       "<mask>",
       0.0
     ]
-  ]
+  ],
+  "byte_fallback": false
   }
 }
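Two substantive changes here: the serialized truncation limit drops from 512 to 256 tokens, and the Metaspace pre-tokenizer and decoder now spell out "prepend_scheme": "always", the explicit form newer tokenizers releases serialize alongside add_prefix_space. A sketch of the truncation effect, same assumed repo id:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("antoinelouis/crossencoder-camembert-base-mmarcoFR")  # assumed id
enc = tok("short query", "a very long passage " * 200, truncation=True)
assert len(enc["input_ids"]) <= 256  # was 512 before this commit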
tokenizer_config.json CHANGED
@@ -1,21 +1,81 @@
 {
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<s>NOTUSED",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<pad>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>NOTUSED",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "4": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "5": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "6": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32004": {
+      "content": "<mask>",
+      "lstrip": true,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32005": {
+      "content": "<unk>NOTUSED",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
   "additional_special_tokens": [
     "<s>NOTUSED",
-    "</s>NOTUSED"
+    "</s>NOTUSED",
+    "<unk>NOTUSED"
   ],
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": true,
   "cls_token": "<s>",
   "eos_token": "</s>",
-  "mask_token": {
-    "__type": "AddedToken",
-    "content": "<mask>",
-    "lstrip": true,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  },
-  "model_max_length": 512,
+  "mask_token": "<mask>",
+  "model_max_length": 256,
   "pad_token": "<pad>",
   "sep_token": "</s>",
   "tokenizer_class": "CamembertTokenizer",