remotejob committed on
Commit
a9178cb
1 Parent(s): 1699668

commit 2 from juno

Files changed (6)
  1. config.json +2 -3
  2. pytorch_model.bin +2 -2
  3. rust_model.ot +2 -2
  4. tokenizer.json +0 -0
  5. tokenizer_config.json +1 -1
  6. vocab.txt +0 -0
config.json CHANGED
@@ -1,11 +1,10 @@
 {
-  "_name_or_path": "bert-base-uncased",
+  "_name_or_path": "TurkuNLP/bert-base-finnish-uncased-v1",
   "architectures": [
     "BertForSequenceClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
   "classifier_dropout": null,
-  "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
@@ -163,5 +162,5 @@
   "transformers_version": "4.12.3",
   "type_vocab_size": 2,
   "use_cache": true,
-  "vocab_size": 30522
+  "vocab_size": 50101
 }
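
This change re-bases the classifier on TurkuNLP/bert-base-finnish-uncased-v1: the deprecated gradient_checkpointing key is dropped from the config and vocab_size grows from 30522 (English BERT) to 50101 (the Finnish vocabulary). A minimal sketch of a sanity check against the files in this commit, assuming a local checkout of the repo in the working directory (the "." path is an assumption, not part of the commit):

import json
from transformers import AutoTokenizer

# Read the committed config directly and compare it with the tokenizer
# shipped in the same commit; both should agree on the Finnish vocab size.
with open("config.json") as f:
    config = json.load(f)

tokenizer = AutoTokenizer.from_pretrained(".")

print(config["_name_or_path"])   # TurkuNLP/bert-base-finnish-uncased-v1
print(config["vocab_size"])      # 50101
assert tokenizer.vocab_size == config["vocab_size"]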
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cdd456fdcf738176d9c28ae0501dfe73be3d4336512bb5804f65c9eb5e2c1f8e
-size 438222253
+oid sha256:edbf7edcee68d4d23b9fb313c1d91393f4cc6a1fb8deb4b7646514ea636a1cfb
+size 498368941
rust_model.ot CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ac83d4fb4ec24b3fc1f341905a628b1192f3725e58f09839af24728c973cc793
-size 438203765
+oid sha256:844dd955ffa92ae773150abb1be18d5e7614a0c4674ac909c2c2fecd0728d616
+size 498350453
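
pytorch_model.bin and rust_model.ot are Git LFS pointer files, so the diff only records the new object id and size of the replaced weights. A hedged sketch for verifying that locally downloaded weight files match the sha256 digests in the new pointers (it assumes the real binaries, not the pointer files, are present in the working directory):

import hashlib

# Expected digests taken from the LFS pointers in this commit.
EXPECTED = {
    "pytorch_model.bin": "edbf7edcee68d4d23b9fb313c1d91393f4cc6a1fb8deb4b7646514ea636a1cfb",
    "rust_model.ot": "844dd955ffa92ae773150abb1be18d5e7614a0c4674ac909c2c2fecd0728d616",
}

for name, expected in EXPECTED.items():
    digest = hashlib.sha256()
    with open(name, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)
    print(name, "OK" if digest.hexdigest() == expected else "MISMATCH")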
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
tokenizer_config.json CHANGED
@@ -1 +1 @@
-{"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "bert-base-uncased", "tokenizer_class": "BertTokenizer"}
+{"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "TurkuNLP/bert-base-finnish-uncased-v1", "tokenizer_class": "BertTokenizer"}
vocab.txt CHANGED
The diff for this file is too large to render. See raw diff