athairus committed
Commit 224f5ca
1 parent: a2dd517

Spoonfy demo (litk-es-en-10-epochs)

added_tokens.json ADDED
@@ -0,0 +1 @@
+ {"[GRP0]": 128112, "[GRP1]": 128113, "[GRP2]": 128114, "[GRP3]": 128115, "[GRP4]": 128116, "[GRP5]": 128117, "[GRP6]": 128118, "[GRP7]": 128119, "[GRP8]": 128120, "[GRP9]": 128121, "[GRP10]": 128122, "[GRP11]": 128123, "[GRP12]": 128124, "[GRP13]": 128125, "[GRP14]": 128126, "[GRP15]": 128127, "[GRP16]": 128128, "[GRP17]": 128129, "[GRP18]": 128130, "[GRP19]": 128131, "[GRP20]": 128132, "[GRP21]": 128133, "[GRP22]": 128134, "[GRP23]": 128135, "[GRP24]": 128136, "[GRP25]": 128137, "[GRP26]": 128138, "[GRP27]": 128139, "[GRP28]": 128140, "[GRP29]": 128141, "[GRP30]": 128142, "[GRP31]": 128143, "[GRP32]": 128144, "[GRP33]": 128145, "[GRP34]": 128146, "[GRP35]": 128147, "[GRP36]": 128148, "[GRP37]": 128149, "[GRP38]": 128150, "[GRP39]": 128151, "[GRP40]": 128152, "[GRP41]": 128153, "[GRP42]": 128154, "[GRP43]": 128155, "[GRP44]": 128156, "[GRP45]": 128157, "[GRP46]": 128158, "[GRP47]": 128159, "[GRP48]": 128160, "[GRP49]": 128161, "[GRP50]": 128162, "[GRP51]": 128163, "[GRP52]": 128164, "[GRP53]": 128165, "[GRP54]": 128166, "[GRP55]": 128167, "[GRP56]": 128168, "[GRP57]": 128169, "[GRP58]": 128170, "[GRP59]": 128171, "[GRP60]": 128172, "[GRP61]": 128173, "[GRP62]": 128174, "[GRP63]": 128175, "[GRP64]": 128176, "[GRP65]": 128177, "[GRP66]": 128178, "[GRP67]": 128179, "[GRP68]": 128180, "[GRP69]": 128181, "[GRP70]": 128182, "[GRP71]": 128183, "[GRP72]": 128184, "[GRP73]": 128185, "[GRP74]": 128186, "[GRP75]": 128187, "[GRP76]": 128188, "[GRP77]": 128189, "[GRP78]": 128190, "[GRP79]": 128191, "[GRP80]": 128192, "[GRP81]": 128193, "[GRP82]": 128194, "[GRP83]": 128195, "[GRP84]": 128196, "[GRP85]": 128197, "[GRP86]": 128198, "[GRP87]": 128199, "[GRP88]": 128200, "[GRP89]": 128201, "[GRP90]": 128202, "[GRP91]": 128203, "[GRP92]": 128204, "[GRP93]": 128205, "[GRP94]": 128206, "[GRP95]": 128207, "[GRP96]": 128208, "[GRP97]": 128209, "[GRP98]": 128210, "[GRP99]": 128211, "[GRP100]": 128212, "[GRP101]": 128213, "[GRP102]": 128214, "[GRP103]": 128215, "[GRP104]": 128216, "[GRP105]": 128217, "[GRP106]": 128218, "[GRP107]": 128219, "[GRP108]": 128220, "[GRP109]": 128221, "[GRP110]": 128222, "[GRP111]": 128223, "[GRP112]": 128224, "[GRP113]": 128225, "[GRP114]": 128226, "[GRP115]": 128227, "[GRP116]": 128228, "[GRP117]": 128229, "[GRP118]": 128230, "[GRP119]": 128231, "[GRP120]": 128232, "[GRP121]": 128233, "[GRP122]": 128234, "[GRP123]": 128235, "[GRP124]": 128236, "[GRP125]": 128237, "[GRP126]": 128238, "[GRP127]": 128239, "<<es>>": 128240, "<<en>>": 128241, "[REF]": 128242}
config.json ADDED
@@ -0,0 +1,37 @@
+ {
+ "_name_or_path": "spoonfy-v1/subtitles-es-en-5-epochs",
+ "activation_dropout": 0.0,
+ "activation_function": "relu",
+ "architectures": [
+ "M2M100ForConditionalGeneration"
+ ],
+ "attention_dropout": 0.1,
+ "bos_token_id": 0,
+ "d_model": 1024,
+ "decoder_attention_heads": 16,
+ "decoder_ffn_dim": 4096,
+ "decoder_layerdrop": 0.05,
+ "decoder_layers": 12,
+ "decoder_start_token_id": 2,
+ "dropout": 0.1,
+ "early_stopping": true,
+ "encoder_attention_heads": 16,
+ "encoder_ffn_dim": 4096,
+ "encoder_layerdrop": 0.05,
+ "encoder_layers": 12,
+ "eos_token_id": 2,
+ "gradient_checkpointing": false,
+ "init_std": 0.02,
+ "is_encoder_decoder": true,
+ "max_length": 200,
+ "max_position_embeddings": 1024,
+ "model_type": "m2m_100",
+ "num_beams": 5,
+ "num_hidden_layers": 12,
+ "pad_token_id": 1,
+ "scale_embedding": true,
+ "torch_dtype": "float32",
+ "transformers_version": "4.16.2",
+ "use_cache": true,
+ "vocab_size": 128243
+ }
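
The vocab_size of 128243 is consistent with the 128112-entry M2M100 base vocabulary plus the 131 added tokens above (128112 + 128 + 3 = 128243). A small sanity-check sketch, assuming the files from this commit are checked out locally (the path below is hypothetical):

```python
from transformers import M2M100Config, M2M100Tokenizer

ckpt = "./litk-es-en-10-epochs"  # hypothetical local checkout of this repo

config = M2M100Config.from_pretrained(ckpt)
tokenizer = M2M100Tokenizer.from_pretrained(ckpt)

# 128112 base entries + 128 [GRP*] + <<es>> + <<en>> + [REF] = 128243
assert config.vocab_size == 128112 + 128 + 3
print(config.vocab_size, len(tokenizer))  # both expected to report 128243
```
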
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e790157818aa38f46413d7ac74d266fdba8b01073362c8a168b9caad54df1c2d
+ size 1936348103
sentencepiece.bpe.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d8f7c76ed2a5e0822be39f0a4f95a55eb19c78f4593ce609e2edbc2aea4d380a
+ size 2423393
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "additional_special_tokens": ["[GRP0]", "[GRP1]", "[GRP2]", "[GRP3]", "[GRP4]", "[GRP5]", "[GRP6]", "[GRP7]", "[GRP8]", "[GRP9]", "[GRP10]", "[GRP11]", "[GRP12]", "[GRP13]", "[GRP14]", "[GRP15]", "[GRP16]", "[GRP17]", "[GRP18]", "[GRP19]", "[GRP20]", "[GRP21]", "[GRP22]", "[GRP23]", "[GRP24]", "[GRP25]", "[GRP26]", "[GRP27]", "[GRP28]", "[GRP29]", "[GRP30]", "[GRP31]", "[GRP32]", "[GRP33]", "[GRP34]", "[GRP35]", "[GRP36]", "[GRP37]", "[GRP38]", "[GRP39]", "[GRP40]", "[GRP41]", "[GRP42]", "[GRP43]", "[GRP44]", "[GRP45]", "[GRP46]", "[GRP47]", "[GRP48]", "[GRP49]", "[GRP50]", "[GRP51]", "[GRP52]", "[GRP53]", "[GRP54]", "[GRP55]", "[GRP56]", "[GRP57]", "[GRP58]", "[GRP59]", "[GRP60]", "[GRP61]", "[GRP62]", "[GRP63]", "[GRP64]", "[GRP65]", "[GRP66]", "[GRP67]", "[GRP68]", "[GRP69]", "[GRP70]", "[GRP71]", "[GRP72]", "[GRP73]", "[GRP74]", "[GRP75]", "[GRP76]", "[GRP77]", "[GRP78]", "[GRP79]", "[GRP80]", "[GRP81]", "[GRP82]", "[GRP83]", "[GRP84]", "[GRP85]", "[GRP86]", "[GRP87]", "[GRP88]", "[GRP89]", "[GRP90]", "[GRP91]", "[GRP92]", "[GRP93]", "[GRP94]", "[GRP95]", "[GRP96]", "[GRP97]", "[GRP98]", "[GRP99]", "[GRP100]", "[GRP101]", "[GRP102]", "[GRP103]", "[GRP104]", "[GRP105]", "[GRP106]", "[GRP107]", "[GRP108]", "[GRP109]", "[GRP110]", "[GRP111]", "[GRP112]", "[GRP113]", "[GRP114]", "[GRP115]", "[GRP116]", "[GRP117]", "[GRP118]", "[GRP119]", "[GRP120]", "[GRP121]", "[GRP122]", "[GRP123]", "[GRP124]", "[GRP125]", "[GRP126]", "[GRP127]", "<<es>>", "<<en>>", "[REF]"]}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"src_lang": null, "tgt_lang": null, "bos_token": "<s>", "eos_token": "</s>", "sep_token": "</s>", "unk_token": "<unk>", "pad_token": "<pad>", "language_codes": "m2m100", "sp_model_kwargs": {}, "num_madeup_words": 8, "special_tokens_map_file": "m2m_100_1.2B_v2/special_tokens_map.json", "tokenizer_file": null, "name_or_path": "spoonfy-v1/subtitles-es-en-5-epochs", "model_max_length": 1024, "additional_special_tokens": ["__af__", "__am__", "__ar__", "__ast__", "__az__", "__ba__", "__be__", "__bg__", "__bn__", "__br__", "__bs__", "__ca__", "__ceb__", "__cs__", "__cy__", "__da__", "__de__", "__el__", "__en__", "__es__", "__et__", "__fa__", "__ff__", "__fi__", "__fr__", "__fy__", "__ga__", "__gd__", "__gl__", "__gu__", "__ha__", "__he__", "__hi__", "__hr__", "__ht__", "__hu__", "__hy__", "__id__", "__ig__", "__ilo__", "__is__", "__it__", "__ja__", "__jv__", "__ka__", "__kk__", "__km__", "__kn__", "__ko__", "__lb__", "__lg__", "__ln__", "__lo__", "__lt__", "__lv__", "__mg__", "__mk__", "__ml__", "__mn__", "__mr__", "__ms__", "__my__", "__ne__", "__nl__", "__no__", "__ns__", "__oc__", "__or__", "__pa__", "__pl__", "__ps__", "__pt__", "__ro__", "__ru__", "__sd__", "__si__", "__sk__", "__sl__", "__so__", "__sq__", "__sr__", "__ss__", "__su__", "__sv__", "__sw__", "__ta__", "__th__", "__tl__", "__tn__", "__tr__", "__uk__", "__ur__", "__uz__", "__vi__", "__wo__", "__xh__", "__yi__", "__yo__", "__zh__", "__zu__"], "tokenizer_class": "M2M100Tokenizer"}
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:624daab75e721d4795babec88d7abd2a4e521051fabf4c13cbaf8815c578fd1b
+ size 3183
vocab.json ADDED
The diff for this file is too large to render.