Ravencer committed
Commit f3665bb
1 Parent(s): 8288903
.gitignore ADDED
@@ -0,0 +1 @@
+ checkpoint-*/
config.json ADDED
@@ -0,0 +1,35 @@
+ {
+ "_name_or_path": "IlyaGusev/rut5_base_sum_gazeta",
+ "architectures": [
+ "T5ForConditionalGeneration"
+ ],
+ "bos_token_id": 2,
+ "d_ff": 2048,
+ "d_kv": 64,
+ "d_model": 768,
+ "decoder_start_token_id": 2,
+ "dense_act_fn": "gelu_new",
+ "dropout_rate": 0.1,
+ "eos_token_id": 1,
+ "feed_forward_proj": "gated-gelu",
+ "initializer_factor": 1.0,
+ "is_encoder_decoder": true,
+ "is_gated_act": true,
+ "layer_norm_epsilon": 1e-06,
+ "max_length": 200,
+ "model_type": "t5",
+ "num_beams": 5,
+ "num_decoder_layers": 12,
+ "num_heads": 12,
+ "num_layers": 12,
+ "output_past": true,
+ "pad_token_id": 0,
+ "relative_attention_max_distance": 128,
+ "relative_attention_num_buckets": 32,
+ "tie_word_embeddings": false,
+ "tokenizer_class": "T5Tokenizer",
+ "torch_dtype": "float32",
+ "transformers_version": "4.29.1",
+ "use_cache": true,
+ "vocab_size": 30000
+ }
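Note: config.json above describes a T5 encoder-decoder fine-tuned from IlyaGusev/rut5_base_sum_gazeta. A minimal loading sketch with the transformers library (4.29.x, as recorded above); the repo id below is a placeholder, not the actual Hub path of this commit:

# Sketch only: replace REPO_ID with the real Hub repo id or a local clone of this repository.
from transformers import T5ForConditionalGeneration, T5Tokenizer

REPO_ID = "path/to/this-repo"  # placeholder, not confirmed by this commit
tokenizer = T5Tokenizer.from_pretrained(REPO_ID)             # reads spiece.model, tokenizer_config.json
model = T5ForConditionalGeneration.from_pretrained(REPO_ID)  # reads config.json, pytorch_model.bin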
generation_config.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "bos_token_id": 2,
+ "decoder_start_token_id": 2,
+ "eos_token_id": 1,
+ "max_length": 200,
+ "num_beams": 5,
+ "pad_token_id": 0,
+ "transformers_version": "4.29.1"
+ }
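generation_config.json fixes the defaults that model.generate() falls back to when no overrides are passed: beam search with 5 beams, up to 200 output tokens, pad id 0, eos id 1, decoder start id 2. Continuing the loading sketch above (the input text and the truncation length of 600 are illustrative, not taken from this repo):

# Sketch: generate() picks up num_beams=5 and max_length=200 from generation_config.json.
text = "..."  # a Russian news article to summarize (illustrative)
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=600)
summary_ids = model.generate(**inputs)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))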
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:03744d4effb777ca7247b43c72c95d5c7a0df69c2de5956eeccfdaee2826d7f0
+ size 977330485
runs/May21_19-09-41_DESKTOP-B79SRTK/1684671055.484253/events.out.tfevents.1684671055.DESKTOP-B79SRTK.21804.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:531c7c5bc4042399d26af037a31f54a1b0db3f04b9a5f6395f33795e63a72d87
+ size 6266
runs/May21_19-09-41_DESKTOP-B79SRTK/events.out.tfevents.1684671055.DESKTOP-B79SRTK.21804.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b329d74cba269abc130a7b677f3de608bb0114ec57ffd0a6ba179c14a98c38ac
+ size 4469
runs/May21_19-21-29_DESKTOP-B79SRTK/1684671712.6897779/events.out.tfevents.1684671712.DESKTOP-B79SRTK.18980.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:636a1f4c90316274cef99d583e887befd7681a64d65102eeeceb9803547a2882
+ size 6266
runs/May21_19-21-29_DESKTOP-B79SRTK/events.out.tfevents.1684671712.DESKTOP-B79SRTK.18980.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a7e03a49c3230d208cfcbb4a981f7c297a91bdab36a6b1c0945ba152f062680
+ size 5330
special_tokens_map.json ADDED
@@ -0,0 +1,5 @@
+ {
+ "eos_token": "</s>",
+ "pad_token": "<pad>",
+ "unk_token": "<unk>"
+ }
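special_tokens_map.json only names the eos, pad, and unk tokens; their ids come from spiece.model and should line up with the ids in config.json (eos_token_id 1, pad_token_id 0). A quick check, continuing the sketch above (the printed values are expectations, not verified output):

# Sanity-check sketch: special tokens vs. the ids recorded in config.json.
print(tokenizer.eos_token, tokenizer.eos_token_id)  # expected: </s> 1
print(tokenizer.pad_token, tokenizer.pad_token_id)  # expected: <pad> 0
print(tokenizer.unk_token, tokenizer.unk_token_id)  # <unk>, id defined by spiece.model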
spiece.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76927654c70c667e967c8caca165a3bb1311b415316b6aed8037ed5ede86b10e
+ size 827566
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,11 @@
+ {
+ "additional_special_tokens": null,
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "</s>",
+ "extra_ids": 0,
+ "model_max_length": 1000000000000000019884624838656,
+ "pad_token": "<pad>",
+ "sp_model_kwargs": {},
+ "tokenizer_class": "T5Tokenizer",
+ "unk_token": "<unk>"
+ }
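tokenizer_config.json is the file T5Tokenizer.from_pretrained() reads for its settings: extra_ids 0 means no <extra_id_N> sentinel tokens are appended to the vocabulary, and model_max_length is left at the "effectively unlimited" sentinel value, so callers should pass their own max_length when encoding. Continuing the sketch (expected values are inferred from the configs, not verified):

# Sketch: inspecting the tokenizer built from spiece.model + tokenizer_config.json.
print(len(tokenizer))              # expected to match config.json's vocab_size of 30000
print(tokenizer.model_max_length)  # the large sentinel value above
enc = tokenizer("пример текста для суммаризации", max_length=64, truncation=True)
print(enc.input_ids)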
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0ccd0780de8a2339efeb0bc83f321d42cb1640bd943065c3f15350091eec148e
+ size 4091
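training_args.bin is typically the torch-pickled TrainingArguments object saved by transformers.Trainer; that it was written by Trainer here is an assumption. A sketch for inspecting it (the attribute names are standard TrainingArguments fields):

# Sketch: unpickling Trainer arguments; needs a compatible transformers version installed
# (4.29.x here). Newer PyTorch may require passing weights_only=False for pickled objects.
import torch
args = torch.load("training_args.bin")
print(type(args).__name__)  # e.g. TrainingArguments or Seq2SeqTrainingArguments
print(args.output_dir, args.num_train_epochs, args.learning_rate)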