leofn3 committed
Commit: befc05e
Parent(s): c1b0920

Training in progress, epoch 1

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "leofn3/modelo_racismo",
+  "_name_or_path": "PORTULAN/albertina-ptbr",
   "architectures": [
     "DebertaV2ForSequenceClassification"
   ],
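The only substantive change in config.json is _name_or_path: transformers records there whichever checkpoint was passed to from_pretrained when the config was last saved, so this epoch-1 checkpoint now points at the PORTULAN/albertina-ptbr base model rather than the earlier leofn3/modelo_racismo fine-tune. A minimal sketch of how this field gets written (the num_labels value is an assumption, the diff does not show the classification head size):

from transformers import AutoModelForSequenceClassification

# from_pretrained records the checkpoint name in config._name_or_path
model = AutoModelForSequenceClassification.from_pretrained(
    "PORTULAN/albertina-ptbr",
    num_labels=2,  # assumption: not visible in this diff
)
print(model.config._name_or_path)  # PORTULAN/albertina-ptbr

# save_pretrained writes that value into config.json next to "architectures"
model.save_pretrained("./checkpoint")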
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:50280d15e05ef0646cbbe6c520626f9f78e1296eec8525c7e6875e3552161eed
+oid sha256:71687f29c16360ae716f5dd12987841cb7a5a8f5ec2a89797d69bcc2b8ab006e
 size 3547966149
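pytorch_model.bin is stored through Git LFS, so the repository only tracks a small pointer file; this commit swaps the SHA-256 oid for the newly trained weights while the byte size stays identical, as expected when only parameter values change. A small sketch (the local file path is a placeholder) for checking that a downloaded copy matches the new oid:

import hashlib

def lfs_sha256(path, chunk_size=1 << 20):
    # Compute the SHA-256 digest that the Git LFS pointer stores on its "oid" line
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

expected = "71687f29c16360ae716f5dd12987841cb7a5a8f5ec2a89797d69bcc2b8ab006e"  # from this commit
assert lfs_sha256("pytorch_model.bin") == expected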
tokenizer.json CHANGED
@@ -1,21 +1,7 @@
 {
   "version": "1.0",
-  "truncation": {
-    "direction": "Right",
-    "max_length": 512,
-    "strategy": "LongestFirst",
-    "stride": 0
-  },
-  "padding": {
-    "strategy": {
-      "Fixed": 512
-    },
-    "direction": "Right",
-    "pad_to_multiple_of": null,
-    "pad_id": 0,
-    "pad_type_id": 0,
-    "pad_token": "[PAD]"
-  },
+  "truncation": null,
+  "padding": null,
   "added_tokens": [
     {
       "id": 0,
tokenizer_config.json CHANGED
@@ -5,19 +5,12 @@
   "do_lower_case": false,
   "eos_token": "[SEP]",
   "mask_token": "[MASK]",
-  "max_length": 512,
   "model_max_length": 512,
-  "pad_to_multiple_of": null,
   "pad_token": "[PAD]",
-  "pad_token_type_id": 0,
-  "padding_side": "right",
   "sep_token": "[SEP]",
   "sp_model_kwargs": {},
   "split_by_punct": false,
-  "stride": 0,
   "tokenizer_class": "DebertaV2Tokenizer",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
   "unk_token": "[UNK]",
   "vocab_type": "spm"
 }
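After this commit, tokenizer_config.json keeps only the core tokenizer settings; call-time options such as max_length, stride, padding_side and truncation_side were dropped and fall back to the library defaults (padding and truncation on the right), which match the removed values. A short sketch of what the slimmed-down config still controls when the tokenizer is loaded:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("PORTULAN/albertina-ptbr")

print(tokenizer.model_max_length)  # 512, kept in tokenizer_config.json
print(tokenizer.pad_token)         # [PAD]
print(tokenizer.padding_side)      # right, now the default rather than an explicit entry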
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ae01b4596063fe0dabced701a3b048977c4e6815291e3b109d925efb2df4f541
+oid sha256:deb3c836210b37fdc9617cc63847e7d66d125cb2a332bb2c69c32ab9ad107559
 size 4027
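training_args.bin is the TrainingArguments object pickled by transformers.Trainer, so, as with the weights, only its Git LFS pointer changes here. A small sketch of inspecting it locally (transformers must be installed so the class can be unpickled; weights_only=False is needed on recent torch versions to allow unpickling arbitrary objects):

import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs)
print(args.learning_rate)
print(args.per_device_train_batch_size)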