Training in progress, step 2500
- .gitignore +1 -0
- config.json +25 -0
- logs/1684462715.2757523/events.out.tfevents.1684462715.w771.3217160.1 +3 -0
- logs/1684462832.1228318/events.out.tfevents.1684462832.w771.3217967.1 +3 -0
- logs/1684462893.6147869/events.out.tfevents.1684462893.w771.3218471.1 +3 -0
- logs/1684462919.487848/events.out.tfevents.1684462919.w771.3218753.1 +3 -0
- logs/1684463211.8126497/events.out.tfevents.1684463211.w771.3221185.1 +3 -0
- logs/1684463370.0653572/events.out.tfevents.1684463370.w771.3222110.1 +3 -0
- logs/events.out.tfevents.1684462715.w771.3217160.0 +3 -0
- logs/events.out.tfevents.1684462832.w771.3217967.0 +3 -0
- logs/events.out.tfevents.1684462893.w771.3218471.0 +3 -0
- logs/events.out.tfevents.1684462919.w771.3218753.0 +3 -0
- logs/events.out.tfevents.1684463211.w771.3221185.0 +3 -0
- logs/events.out.tfevents.1684463370.w771.3222110.0 +3 -0
- merges.txt +0 -0
- pytorch_model.bin +3 -0
- special_tokens_map.json +15 -0
- tokenizer.json +0 -0
- tokenizer_config.json +23 -0
- training_args.bin +3 -0
- vocab.json +0 -0
.gitignore
ADDED
@@ -0,0 +1 @@
+checkpoint-*/
config.json
ADDED
@@ -0,0 +1,25 @@
+{
+  "_name_or_path": "bert-tiny",
+  "architectures": [
+    "BertForMaskedLM"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "classifier_dropout": null,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 128,
+  "initializer_range": 0.02,
+  "intermediate_size": 512,
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "bert",
+  "num_attention_heads": 2,
+  "num_hidden_layers": 2,
+  "pad_token_id": 0,
+  "position_embedding_type": "absolute",
+  "torch_dtype": "float32",
+  "transformers_version": "4.26.1",
+  "type_vocab_size": 2,
+  "use_cache": true,
+  "vocab_size": 50265
+}
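This config describes a very small BERT-style masked LM: 2 hidden layers, 2 attention heads, hidden size 128, and a 50,265-entry vocabulary (RoBERTa-sized, matching the tokenizer files below). A minimal sketch of loading it with transformers, assuming the committed files are cloned to a local directory ./model_dir (hypothetical path):

```python
from transformers import AutoConfig, AutoModelForMaskedLM

# Read the config.json committed above; "./model_dir" is a hypothetical local clone.
config = AutoConfig.from_pretrained("./model_dir")
print(config.hidden_size, config.num_hidden_layers)  # 128 2

# Instantiate the class named in "architectures" with the committed weights.
model = AutoModelForMaskedLM.from_pretrained("./model_dir")
```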
logs/1684462715.2757523/events.out.tfevents.1684462715.w771.3217160.1
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5b4db49ba696293ac14eb7205d5311fe5c8992fcc86119a9b0d4639ea2a8de6c
+size 5666
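Each of these log entries is a Git LFS pointer rather than the binary TensorBoard file itself: three text lines giving the LFS spec version, a sha256 object id, and the payload size in bytes. A minimal sketch of reading such a pointer, assuming a local file path (the helper below is my illustration, not part of this repo):

```python
from pathlib import Path

def parse_lfs_pointer(path: str) -> dict:
    """Split a Git LFS pointer file into its version/oid/size fields."""
    fields = {}
    for line in Path(path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

# Example result for the file above:
# {"version": "https://git-lfs.github.com/spec/v1",
#  "oid": "sha256:5b4db49ba696...", "size": "5666"}
```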
logs/1684462832.1228318/events.out.tfevents.1684462832.w771.3217967.1
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4d13debb705b49eb8afe08196966a1cff443eff0bf2f4ebca853ba63980bdd7e
+size 5666
logs/1684462893.6147869/events.out.tfevents.1684462893.w771.3218471.1
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:576a6f2fef16a9b7716fbf5f6a4af217f31346d4bf28ce3c2ac2a7c4df6f3357
+size 5666
logs/1684462919.487848/events.out.tfevents.1684462919.w771.3218753.1
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:14336af70f986ebc1bb1c5c82b3883f137fe9ea1d850da2e0efaab878993ead0
+size 5666
logs/1684463211.8126497/events.out.tfevents.1684463211.w771.3221185.1
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1daee3cc1862fbbe3e4f4f3561ac3fbe63088456231c97802d8167489613fa90
+size 5666
logs/1684463370.0653572/events.out.tfevents.1684463370.w771.3222110.1
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0905b9bddd42a4ceb50daa56795181146c0f5ab7b4f24ab3bd6ef5dc8f2e78d3
+size 5666
logs/events.out.tfevents.1684462715.w771.3217160.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a1b6f0f5ddf208cd8b80a4c5e774adf8d08a3b3c525617ebe54af5962aa832d8
+size 3819
logs/events.out.tfevents.1684462832.w771.3217967.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a924afe02ae19602cdde3dab1592791143ed742724d707c5d7821b473d0fd216
+size 3819
logs/events.out.tfevents.1684462893.w771.3218471.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e08b271ec315674bf16cdec2f72430f098325c751d4798454bdad494a61ebdb6
+size 3819
logs/events.out.tfevents.1684462919.w771.3218753.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:97291256de5900eab031df1ba58b569f092900948f6e0169343a314ab5bdd113
+size 3819
logs/events.out.tfevents.1684463211.w771.3221185.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:504e6effe2287119b60852440c2054594e2a32c3e563dd8e5ea294f90875e3a4
+size 3725
logs/events.out.tfevents.1684463370.w771.3222110.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3af44c2ee6865fa672391789dfd07b7c529e3cb7afd00af6eb374ad530656feb
+size 7646
merges.txt
ADDED
The diff for this file is too large to render.
See raw diff
pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:69c323e427d067a8572b6a5d2f5847438d33c57158ee6f3fece1f7cd3ec25f23
+size 27872690
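The ~27.9 MB pointer size is consistent with the config above: roughly 7M float32 parameters, dominated by the 50265 × 128 embedding matrix. A rough sanity check, rebuilding the architecture from the committed hyperparameters (my arithmetic, not data from the commit):

```python
from transformers import BertConfig, BertForMaskedLM

# Reconstruct the architecture from config.json's hyperparameters (random weights).
config = BertConfig(
    vocab_size=50265, hidden_size=128, num_hidden_layers=2,
    num_attention_heads=2, intermediate_size=512,
)
model = BertForMaskedLM(config)
n_params = sum(p.numel() for p in model.parameters())
print(n_params)      # ~7M parameters
print(n_params * 4)  # ~28 MB as float32, close to the 27,872,690-byte checkpoint
```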
special_tokens_map.json
ADDED
@@ -0,0 +1,15 @@
+{
+  "bos_token": "<s>",
+  "cls_token": "<s>",
+  "eos_token": "</s>",
+  "mask_token": {
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "unk_token": "<unk>"
+}
tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
tokenizer_config.json
ADDED
@@ -0,0 +1,23 @@
+{
+  "add_prefix_space": false,
+  "bos_token": "<s>",
+  "cls_token": "<s>",
+  "eos_token": "</s>",
+  "errors": "replace",
+  "mask_token": {
+    "__type": "AddedToken",
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "model_max_length": 512,
+  "name_or_path": "olm-bert-tiny-december-2022",
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "special_tokens_map_file": null,
+  "tokenizer_class": "RobertaTokenizer",
+  "trim_offsets": true,
+  "unk_token": "<unk>"
+}
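tokenizer_config.json pins the class to RobertaTokenizer, which matches the RoBERTa-style special tokens (<s>, </s>, <mask>, <pad>) in special_tokens_map.json and the BPE files (vocab.json, merges.txt) committed alongside. A minimal sketch of loading it, again assuming a hypothetical local clone at ./model_dir:

```python
from transformers import AutoTokenizer

# AutoTokenizer dispatches on "tokenizer_class" in tokenizer_config.json.
tokenizer = AutoTokenizer.from_pretrained("./model_dir")
print(tokenizer.mask_token)        # <mask>
print(tokenizer.model_max_length)  # 512
print(len(tokenizer))              # 50265, matching config.json's vocab_size
```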
training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5759fb20e80a21160c150f3b504225063aaea44b0393ad710acb4766978794ca
+size 3515
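training_args.bin is the Trainer's serialized TrainingArguments object (saved via torch.save, hence another small LFS pointer). A hedged sketch for inspecting the run's hyperparameters, assuming transformers is importable so the object can be unpickled:

```python
import torch

# Unpickling needs transformers on the path; newer torch versions require
# weights_only=False to load arbitrary pickled objects.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.max_steps)
```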
vocab.json
ADDED
The diff for this file is too large to render.
See raw diff