Zelyanoth committed
Commit 3048d58 (1 parent: 581fcb4)

Training in progress, epoch 1

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "facebook/nllb-200-1.3B",
+  "_name_or_path": "facebook/nllb-200-distilled-600M",
   "activation_dropout": 0.0,
   "activation_function": "relu",
   "architectures": [
@@ -9,25 +9,26 @@
   "bos_token_id": 0,
   "d_model": 1024,
   "decoder_attention_heads": 16,
-  "decoder_ffn_dim": 8192,
+  "decoder_ffn_dim": 4096,
   "decoder_layerdrop": 0,
-  "decoder_layers": 24,
+  "decoder_layers": 12,
   "decoder_start_token_id": 2,
   "dropout": 0.1,
   "encoder_attention_heads": 16,
-  "encoder_ffn_dim": 8192,
+  "encoder_ffn_dim": 4096,
   "encoder_layerdrop": 0,
-  "encoder_layers": 24,
+  "encoder_layers": 12,
   "eos_token_id": 2,
   "init_std": 0.02,
   "is_encoder_decoder": true,
   "max_length": 200,
   "max_position_embeddings": 1024,
   "model_type": "m2m_100",
-  "num_hidden_layers": 24,
+  "num_hidden_layers": 12,
   "pad_token_id": 1,
   "scale_embedding": true,
-  "torch_dtype": "float16",
+  "tokenizer_class": "NllbTokenizer",
+  "torch_dtype": "float32",
   "transformers_version": "4.39.3",
   "use_cache": true,
   "vocab_size": 256206
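The config change swaps the base checkpoint from facebook/nllb-200-1.3B to facebook/nllb-200-distilled-600M: encoder and decoder depth drop from 24 to 12 layers, the FFN width from 8192 to 4096, the stored dtype moves from float16 to float32, and the tokenizer class (NllbTokenizer) is now recorded. As a hedged sketch (not part of this commit), the new values can be cross-checked against the hub config with transformers:

# Minimal sketch, assuming the transformers library is installed.
# The checkpoint name comes from the "_name_or_path" field above;
# the printed values should match the right-hand side of the diff.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("facebook/nllb-200-distilled-600M")

print(config.model_type)       # m2m_100
print(config.encoder_layers)   # 12
print(config.decoder_layers)   # 12
print(config.encoder_ffn_dim)  # 4096
print(config.d_model)          # 1024
print(config.vocab_size)       # 256206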
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0576dc3f26cdd00433ba4d96068d992ae40ffb0868e0f4b0b854cda0e44ab1c4
-size 2741396584
+oid sha256:d05416ac70afadc3511eb82b32cdbb9d44e1102f0ce01b68d2076b6ff0394a66
+size 2460354912
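The weights are stored through Git LFS, so the diff only shows the pointer file: a SHA-256 oid plus a byte size. The checkpoint shrinks from 2,741,396,584 bytes (the 1.3B model in float16) to 2,460,354,912 bytes (the 600M model in float32). A hedged sketch of verifying a locally downloaded copy against its pointer, assuming the file is saved as model.safetensors:

# Hedged sketch: check a downloaded artifact against its LFS pointer.
# The local filename is an assumption; oid and size come from the pointer above.
import hashlib
import os

path = "model.safetensors"
expected_oid = "d05416ac70afadc3511eb82b32cdbb9d44e1102f0ce01b68d2076b6ff0394a66"
expected_size = 2460354912

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert sha.hexdigest() == expected_oid, "sha256 mismatch"
print("local file matches the LFS pointer")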
runs/Jun01_23-18-08_628c2640942c/events.out.tfevents.1717283890.628c2640942c.23.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:01769003e1fc3be0069e0476361e7323cdaf313a525e6c87e24b265d8e8e797c
+size 10889
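The added events.out.tfevents file is a TensorBoard log, which the transformers Trainer typically writes under runs/ when TensorBoard logging is enabled. A hedged sketch of listing its scalars locally, assuming the tensorboard package is installed and the runs/ directory has been downloaded:

# Hedged sketch: read the scalar metrics recorded in the added tfevents file.
# The run directory mirrors the path added in this commit.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Jun01_23-18-08_628c2640942c")
acc.Reload()

for tag in acc.Tags()["scalars"]:  # e.g. train/loss, train/epoch
    print(tag, [(e.step, e.value) for e in acc.Scalars(tag)[:3]])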
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:79d46f04d5542e99359568acc8c450715c649e99e6f3c64bbe6eb5c49e0dda79
+oid sha256:a16f4d12ed1a2e80c89c4f6d3a609836bd7fc47ed22dfe36debf42b960503106
 size 5112
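training_args.bin is the pickled TrainingArguments object the Trainer saves next to the checkpoint; only its hash changes here, while the 5112-byte size stays the same. A hedged sketch of inspecting it, assuming torch and transformers are installed and the file comes from a trusted source (it is a pickle, so it runs code on load):

# Hedged sketch: load the saved training arguments for inspection.
# weights_only=False is required on recent PyTorch because this is a pickled
# Python object, not a tensor file; only do this with trusted files.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)               # e.g. Seq2SeqTrainingArguments
print(args.num_train_epochs)
print(args.per_device_train_batch_size)
print(args.learning_rate)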