greasyFinger committed on
Commit 30be128
1 Parent(s): f6af0de

Training in progress, epoch 1

Files changed (2)
  1. config.json +5 -4
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,10 +1,11 @@
 {
-  "_name_or_path": "cl-tohoku/bert-base-japanese",
+  "_name_or_path": "dccuchile/bert-base-spanish-wwm-cased",
   "architectures": [
     "BertForSequenceClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
   "classifier_dropout": null,
+  "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
@@ -23,13 +24,13 @@
   "model_type": "bert",
   "num_attention_heads": 12,
   "num_hidden_layers": 12,
-  "pad_token_id": 0,
+  "output_past": true,
+  "pad_token_id": 1,
   "position_embedding_type": "absolute",
   "problem_type": "single_label_classification",
-  "tokenizer_class": "BertJapaneseTokenizer",
   "torch_dtype": "float32",
   "transformers_version": "4.38.2",
   "type_vocab_size": 2,
   "use_cache": true,
-  "vocab_size": 32000
+  "vocab_size": 31002
 }
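For reference, a minimal sketch of loading this checkpoint and checking the config values changed above. The repository id is a placeholder (the actual repo name is not shown on this commit page); only vocab_size and pad_token_id are taken from the diff itself.

```python
# Minimal sketch, assuming a placeholder repo id for this commit's repository.
from transformers import AutoConfig, AutoModelForSequenceClassification

repo_id = "greasyFinger/placeholder-repo"  # hypothetical: substitute the real Hub repo

config = AutoConfig.from_pretrained(repo_id)
print(config.vocab_size)    # 31002 after this commit (Spanish BETO vocabulary)
print(config.pad_token_id)  # 1 after this commit (was 0 with the Japanese base model)

model = AutoModelForSequenceClassification.from_pretrained(repo_id)
print(f"{model.num_parameters():,} parameters")
```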
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ac8db188db15854812fb63d5c4523fbd8c6dca28a11d243794b9800a479857cd
-size 442499064
+oid sha256:f65872546ebfc81c300be98301b5d8904a647dfdf5757074959ce52b130bb703
+size 439433208
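A short sketch for verifying a downloaded model.safetensors against the new LFS pointer above (sha256 oid and byte size). The local path is a placeholder for wherever the file was downloaded.

```python
# Minimal sketch: check the downloaded weights against the LFS pointer in this commit.
import hashlib
from pathlib import Path

EXPECTED_SHA256 = "f65872546ebfc81c300be98301b5d8904a647dfdf5757074959ce52b130bb703"
EXPECTED_SIZE = 439433208  # bytes, from the new pointer

path = Path("model.safetensors")  # placeholder local path

assert path.stat().st_size == EXPECTED_SIZE, "size mismatch"

h = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == EXPECTED_SHA256, "checksum mismatch"
print("model.safetensors matches the LFS pointer in this commit")
```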