finiteautomata committed
Commit e0be98e
1 Parent(s): 0885da4

Update with new preprocessing

added_tokens.json ADDED
@@ -0,0 +1 @@
+{"[EMOJI]": 31004, "[USER]": 31002, "[HASHTAG]": 31003}
config.json CHANGED
@@ -1,23 +1,24 @@
 {
+  "_name_or_path": "dccuchile/bert-base-spanish-wwm-cased",
   "architectures": [
     "BertForSequenceClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
   "gradient_checkpointing": false,
   "hidden_act": "gelu",
-  "hidden_dropout_prob": 0.2,
+  "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
   "id2label": {
-    "0": "NEG",
+    "0": "N",
     "1": "NEU",
-    "2": "POS"
+    "2": "P"
   },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
   "label2id": {
-    "NEG": 0,
+    "N": 0,
     "NEU": 1,
-    "POS": 2
+    "P": 2
   },
   "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
@@ -26,7 +27,10 @@
   "num_hidden_layers": 12,
   "output_past": true,
   "pad_token_id": 1,
-  "return_dict": true,
+  "position_embedding_type": "absolute",
+  "problem_type": "single_label_classification",
+  "transformers_version": "4.6.1",
   "type_vocab_size": 2,
-  "vocab_size": 31002
+  "use_cache": true,
+  "vocab_size": 31005
 }
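With the updated config, the classification head reports the shortened label names N/NEU/P (previously NEG/NEU/POS). A minimal inference sketch against this checkpoint; "path/to/this/repo" is a placeholder, not a path defined by the commit:

# Minimal inference sketch; "path/to/this/repo" stands in for wherever this
# checkpoint is stored locally or on the Hub.
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

repo = "path/to/this/repo"  # placeholder
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForSequenceClassification.from_pretrained(repo)

inputs = tokenizer("Qué buen día", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
# id2label is now {0: "N", 1: "NEU", 2: "P"} instead of NEG/NEU/POS.
print(model.config.id2label[logits.argmax(-1).item()])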
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:20536fde3deecaba37ff4b84c4116259ad86c419ad4a464cb1b2387993674d22
-size 439465385
+oid sha256:ae62aae323fb95e9a89a10783c6d54565251135d6e5e3ebfbaaa1cede9b26c8d
+size 439508881
special_tokens_map.json CHANGED
@@ -1 +1 @@
-{"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
+{"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "additional_special_tokens": ["[USER]", "[HASHTAG]", "[EMOJI]"]}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
tokenizer_config.json CHANGED
@@ -1 +1 @@
-{"do_lower_case": false, "special_tokens_map_file": "/home/jmperez/.cache/torch/transformers/aa40d465a73f3614a619f68336225bb02e1d0917937da285039a618a5135724d.275045728fbf41c11d3dae08b8742c054377e18d92cc7b72b6351152a99b64e4", "full_tokenizer_file": null}
+{"do_lower_case": false, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": false, "name_or_path": "dccuchile/bert-base-spanish-wwm-cased", "do_basic_tokenize": true, "never_split": null, "model_max_length": 512, "special_tokens_map_file": "/home/jmperez/.cache/huggingface/transformers/9848a00af462c42dfb4ec88ef438fbab5256330f7f6f50badc48d277f9367d49.f982506b52498d4adb4bd491f593dc92b2ef6be61bfdbe9d30f53f963f9f5b66"}
vocab.txt CHANGED
@@ -939,42 +939,42 @@
939
  [unused932]
940
  [unused933]
941
  [unused934]
942
- [unused935]
943
- [unused936]
944
- [unused937]
945
- [unused938]
946
- [unused939]
947
- [unused940]
948
- [unused941]
949
- [unused942]
950
- [unused943]
951
- [unused944]
952
- [unused945]
953
- [unused946]
954
- [unused947]
955
- [unused948]
956
- [unused949]
957
- [unused950]
958
- [unused951]
959
- [unused952]
960
- [unused953]
961
- [unused954]
962
- [unused955]
963
- [unused956]
964
- [unused957]
965
- [unused958]
966
- [unused959]
967
- [unused960]
968
- [unused961]
969
- [unused962]
970
- [unused963]
971
- [unused964]
972
- [unused965]
973
- [unused966]
974
- [unused967]
975
- [unused968]
976
- [unused969]
977
- [unused970]
978
  ##7
979
  7
980
  ##w
939
  [unused932]
940
  [unused933]
941
  [unused934]
942
+ ##|
943
+ |
944
+ ##}
945
+ }
946
+ ##{
947
+ {
948
+ ##_
949
+ _
950
+ ##+
951
+ +
952
+ ##*
953
+ *
954
+ ##&
955
+ &
956
+ ##$
957
+ $
958
+ ##]
959
+ ]
960
+ ##[
961
+ [
962
+ ##=
963
+ =
964
+ ##>
965
+ >
966
+ ##<
967
+ <
968
+ ##@
969
+ @
970
+ ##\
971
+ \
972
+ ##/
973
+ /
974
+ ##%
975
+ %
976
+ ##;
977
+ ;
978
  ##7
979
  7
980
  ##w