PontifexMaximus committed on
Commit 463005b
1 Parent(s): 0ea848d

Training in progress, step 500

.gitattributes CHANGED
@@ -25,3 +25,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zstandard filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1 @@
+ checkpoint-*/
config.json ADDED
@@ -0,0 +1,29 @@
+ {
+ "_name_or_path": "persiannlp/mt5-small-parsinlu-opus-translation_fa_en",
+ "architectures": [
+ "MT5ForConditionalGeneration"
+ ],
+ "d_ff": 1024,
+ "d_kv": 64,
+ "d_model": 512,
+ "decoder_start_token_id": 0,
+ "dropout_rate": 0.1,
+ "eos_token_id": 1,
+ "feed_forward_proj": "gated-gelu",
+ "initializer_factor": 1.0,
+ "is_encoder_decoder": true,
+ "layer_norm_epsilon": 1e-06,
+ "model_type": "mt5",
+ "num_decoder_layers": 8,
+ "num_heads": 6,
+ "num_layers": 8,
+ "pad_token_id": 0,
+ "relative_attention_max_distance": 128,
+ "relative_attention_num_buckets": 32,
+ "tie_word_embeddings": false,
+ "tokenizer_class": "T5Tokenizer",
+ "torch_dtype": "float32",
+ "transformers_version": "4.19.2",
+ "use_cache": true,
+ "vocab_size": 250112
+ }
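For context, config.json above declares an mT5-small seq2seq model (MT5ForConditionalGeneration with a T5Tokenizer). A minimal loading sketch, assuming the transformers and sentencepiece packages are installed; "path-or-repo-id" is a placeholder for a local clone of this repository or the published Hub repo id, not the actual name:

# Minimal sketch: load the checkpoint described by config.json above.
# "path-or-repo-id" is a placeholder; substitute a local path or the Hub repo id.
from transformers import MT5ForConditionalGeneration, T5Tokenizer

model = MT5ForConditionalGeneration.from_pretrained("path-or-repo-id")
tokenizer = T5Tokenizer.from_pretrained("path-or-repo-id")

# Translate a Persian sentence to English (the base model is a fa->en translator).
inputs = tokenizer("این یک آزمایش است.", return_tensors="pt")
outputs = model.generate(**inputs, max_length=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))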
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f2e1d80f3a00434cdf1579f4b2f94aac4857c77faa63b45cce2bece7fe840636
+ size 1200773995
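The entry above is a Git LFS pointer, not the weights themselves; it records only the sha256 digest and byte size of pytorch_model.bin. A small sketch, assuming the real file has already been fetched (e.g. via git lfs pull), to check a local copy against those recorded values:

# Minimal sketch: verify a local pytorch_model.bin against the LFS pointer above.
import hashlib
import os

path = "pytorch_model.bin"  # adjust to where the file was downloaded
expected_oid = "f2e1d80f3a00434cdf1579f4b2f94aac4857c77faa63b45cce2bece7fe840636"
expected_size = 1200773995

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert sha.hexdigest() == expected_oid, "sha256 mismatch"
print("pytorch_model.bin matches the pointer")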
runs/Jun03_10-59-13_a7e93b83523a/1654253963.827113/events.out.tfevents.1654253963.a7e93b83523a.32517.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c9cd5d9dcd1b58cb2e9e931f3ddf668d5cede9a4dbf5465ab394863d83545c7d
+ size 5523
runs/Jun03_10-59-13_a7e93b83523a/events.out.tfevents.1654253963.a7e93b83523a.32517.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:61b245c2d7289288ed144c930160f0374fe8456190eec2b8f2b601d5d93a6a11
+ size 4589
runs/Jun03_11-02-27_a7e93b83523a/1654254156.4056969/events.out.tfevents.1654254156.a7e93b83523a.32517.9 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ba6b8d72ea097f7c5415634d874e058a10e53cce9fa5075790a51bcf633cf9e
+ size 5523
runs/Jun03_11-02-27_a7e93b83523a/events.out.tfevents.1654254156.a7e93b83523a.32517.8 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2c3acb26a2c9370760e9802686ee11755409965195daad17d9ab3866d421377d
+ size 3849
runs/Jun03_11-19-58_a7e93b83523a/1654255207.621004/events.out.tfevents.1654255207.a7e93b83523a.32517.11 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e4835ab626da671dc314d6a0af1283368c887419f06960403b8bf1abc53bc591
+ size 5523
runs/Jun03_11-19-58_a7e93b83523a/events.out.tfevents.1654255207.a7e93b83523a.32517.10 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6251dd04b5b5865168c2a3793bf1fb575509e856eef4284c999c6e831937412f
+ size 40
runs/Jun03_11-24-40_a7e93b83523a/1654255491.2047496/events.out.tfevents.1654255491.a7e93b83523a.83482.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:97fcc7e648985531cf367cec50924eca473955cf163a3c3f56f50159cc9574de
+ size 5523
runs/Jun03_11-24-40_a7e93b83523a/events.out.tfevents.1654255491.a7e93b83523a.83482.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5c0c2cd0c72b6211ba3bff36823b664ca6b7a230229688f174120e9f0a7f0574
+ size 5116
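The runs/ entries above are TensorBoard event files, likewise stored as LFS pointers. A sketch of reading the logged scalars once the files are pulled locally, assuming the tensorboard package is installed; the tag name "train/loss" is an assumption and may differ in these particular logs:

# Minimal sketch: read scalars from one of the event directories listed above.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

ea = EventAccumulator("runs/Jun03_11-24-40_a7e93b83523a")
ea.Reload()
print(ea.Tags()["scalars"])             # list the scalar tags that were actually logged
for event in ea.Scalars("train/loss"):  # "train/loss" is an assumed tag name
    print(event.step, event.value)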
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>"}
spiece.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef78f86560d809067d12bac6c09f19a462cb3af3f54d2b8acbba26e1433125d6
+ size 4309802
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:faaa6405f5f79c9e788c7980874a9a3b5b0aea07b53bd9243bf1abb8f5c49c81
+ size 16330467
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"eos_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "extra_ids": 0, "additional_special_tokens": null, "special_tokens_map_file": "/Users/danielk/.cache/huggingface/transformers/685ac0ca8568ec593a48b61b0a3c272beee9bc194a3c7241d15dcadb5f875e53.f76030f3ec1b96a8199b2593390c610e76ca8028ef3d24680000619ffb646276", "name_or_path": "persiannlp/mt5-small-parsinlu-opus-translation_fa_en", "sp_model_kwargs": {}, "tokenizer_class": "T5Tokenizer"}
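tokenizer_config.json and special_tokens_map.json above carry over the settings of the base persiannlp tokenizer. As a sketch, the same T5Tokenizer can be built directly from the spiece.model file in this commit with those settings; the paths assume the repository root as the working directory:

# Minimal sketch: build the tokenizer from spiece.model using the settings shown above.
from transformers import T5Tokenizer

tokenizer = T5Tokenizer(
    vocab_file="spiece.model",
    eos_token="</s>",
    unk_token="<unk>",
    pad_token="<pad>",
    extra_ids=0,
)
print(tokenizer.tokenize("Hello world"))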
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f8d010e714516edf08d245b5ed5ba2eed35334081ede1a87218ad493de7283b1
+ size 3439
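training_args.bin is the serialized Trainer configuration, i.e. a pickled transformers TrainingArguments object. A sketch for inspecting it, assuming torch and transformers are installed so the pickled class can be resolved:

# Minimal sketch: inspect the serialized training arguments from this commit.
import torch

args = torch.load("training_args.bin")  # on torch >= 2.6, add weights_only=False
print(type(args).__name__)
print(args.output_dir, args.learning_rate, args.num_train_epochs)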