HarryCovert committed
Commit 961307b
Parent: 92b2220

Upload 6 files
config.json CHANGED
@@ -1,31 +1,22 @@
 {
-  "_name_or_path": "camembert/camembert-base",
   "architectures": [
-    "CamembertForTokenClassification"
+    "CamembertForSequenceClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
-  "bos_token_id": 0,
+  "bos_token_id": 5,
   "classifier_dropout": null,
-  "eos_token_id": 2,
-  "eos_token_ids": 0,
+  "eos_token_id": 6,
+  "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
   "id2label": {
-    "0": "B-Maladie",
-    "1": "B-Ravageur",
-    "2": "I-Maladie",
-    "3": "I-Ravageur",
-    "4": "O"
+    "0": "Observation"
   },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
   "label2id": {
-    "B-Maladie": 0,
-    "B-Ravageur": 1,
-    "I-Maladie": 2,
-    "I-Ravageur": 3,
-    "O": 4
+    "Observation": 0
   },
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 514,
@@ -33,11 +24,11 @@
   "num_attention_heads": 12,
   "num_hidden_layers": 12,
   "output_past": true,
-  "pad_token_id": 0,
+  "pad_token_id": 1,
   "position_embedding_type": "absolute",
   "torch_dtype": "float32",
-  "transformers_version": "4.21.0",
+  "transformers_version": "4.18.0",
   "type_vocab_size": 1,
   "use_cache": true,
   "vocab_size": 32005
-}
+}
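The config change repurposes the checkpoint: the five-label token-classification head (BIO tags for Maladie/Ravageur entities) is replaced by a single-label CamembertForSequenceClassification head ("Observation"), and the special-token ids are brought in line with the camembert-base defaults (bos 5, eos 6, pad 1). A minimal loading sketch follows; the repository id is a hypothetical placeholder, since this commit does not show it:

# Minimal sketch of loading the updated checkpoint.
# "HarryCovert/your-model-id" is a hypothetical placeholder, not the real repo id.
from transformers import AutoModelForSequenceClassification, AutoTokenizer

model_id = "HarryCovert/your-model-id"  # hypothetical
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSequenceClassification.from_pretrained(model_id)

inputs = tokenizer("Des pucerons sur les feuilles de pommier.", return_tensors="pt")
logits = model(**inputs).logits
print(model.config.id2label[logits.argmax(-1).item()])

With only one entry in id2label, the argmax is always 0, so this prints "Observation" by construction; presumably the single logit is meant to be thresholded, or further labels added later.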
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:594d1f146ec0ec0534db6ea2b1de2cde2584091f53170a7ba0fe0946bbdcf8eb
-size 440211889
+oid sha256:5f80ce9831f104e7860a1a07101096fc52eea5e311492e8ad88cc082d39e97b9
+size 442562541
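Because pytorch_model.bin is tracked with Git LFS, the diff only shows the pointer file: the blob's sha256 oid and byte size. The new weights are about 2.3 MB larger, consistent with swapping the tiny 5-way token-classification projection for a sequence-classification head that includes a 768x768 dense layer. A minimal integrity check against the new pointer (the local file path is illustrative):

# Minimal sketch: verify a downloaded pytorch_model.bin against the LFS pointer above.
import hashlib

EXPECTED_OID = "5f80ce9831f104e7860a1a07101096fc52eea5e311492e8ad88cc082d39e97b9"
EXPECTED_SIZE = 442562541

h = hashlib.sha256()
size = 0
with open("pytorch_model.bin", "rb") as f:          # illustrative path
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"size mismatch: {size}"
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"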
special_tokens_map.json CHANGED
@@ -1,19 +1 @@
-{
-  "additional_special_tokens": [
-    "<s>NOTUSED",
-    "</s>NOTUSED"
-  ],
-  "bos_token": "<s>",
-  "cls_token": "<s>",
-  "eos_token": "</s>",
-  "mask_token": {
-    "content": "<mask>",
-    "lstrip": true,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": "<pad>",
-  "sep_token": "</s>",
-  "unk_token": "<unk>"
-}
+{"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}, "additional_special_tokens": ["<s>NOTUSED", "</s>NOTUSED"]}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,24 +1 @@
-{
-  "additional_special_tokens": [
-    "<s>NOTUSED",
-    "</s>NOTUSED"
-  ],
-  "bos_token": "<s>",
-  "cls_token": "<s>",
-  "eos_token": "</s>",
-  "mask_token": {
-    "__type": "AddedToken",
-    "content": "<mask>",
-    "lstrip": true,
-    "normalized": true,
-    "rstrip": false,
-    "single_word": false
-  },
-  "name_or_path": "camembert/camembert-base",
-  "pad_token": "<pad>",
-  "sep_token": "</s>",
-  "sp_model_kwargs": {},
-  "special_tokens_map_file": null,
-  "tokenizer_class": "CamembertTokenizer",
-  "unk_token": "<unk>"
-}
+{"bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "cls_token": "<s>", "unk_token": "<unk>", "pad_token": "<pad>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "additional_special_tokens": ["<s>NOTUSED", "</s>NOTUSED"], "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "camembert-base", "tokenizer_class": "CamembertTokenizer"}