joaogante committed
Commit bbdfc17
Parent: f5aa4b0

Add TF weights

Files changed (2):
  1. config.json +4 -1
  2. tf_model.h5 +3 -0
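With tf_model.h5 in the repository, the checkpoint can be loaded from TensorFlow as well as PyTorch. A minimal sketch, assuming the Hub id Helsinki-NLP/opus-tatoeba-en-ja (inferred from the _name_or_path added in config.json below) and an environment with transformers plus TensorFlow installed:

from transformers import MarianTokenizer, TFMarianMTModel

# Assumed Hub repository id; adjust if the model lives under a different name.
model_id = "Helsinki-NLP/opus-tatoeba-en-ja"

tokenizer = MarianTokenizer.from_pretrained(model_id)
# from_pretrained picks up the tf_model.h5 added by this commit.
model = TFMarianMTModel.from_pretrained(model_id)

batch = tokenizer(["How are you?"], return_tensors="tf")
generated = model.generate(**batch)
print(tokenizer.batch_decode(generated, skip_special_tokens=True))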
config.json CHANGED
@@ -1,4 +1,5 @@
 {
+  "_name_or_path": "/tmp/Helsinki-NLP/opus-tatoeba-en-ja",
   "activation_dropout": 0.0,
   "activation_function": "swish",
   "architectures": [
@@ -18,6 +19,7 @@
   "decoder_layerdrop": 0.0,
   "decoder_layers": 6,
   "decoder_start_token_id": 65000,
+  "decoder_vocab_size": 65001,
   "dropout": 0.1,
   "encoder_attention_heads": 8,
   "encoder_ffn_dim": 2048,
@@ -35,9 +37,10 @@
   "num_hidden_layers": 6,
   "pad_token_id": 65000,
   "scale_embedding": true,
+  "share_encoder_decoder_embeddings": true,
   "static_position_embeddings": true,
   "torch_dtype": "float16",
-  "transformers_version": "4.11.3",
+  "transformers_version": "4.22.0.dev0",
   "use_cache": true,
   "vocab_size": 65001
 }
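The three new keys come from re-exporting the config with a newer transformers (4.22.0.dev0 instead of 4.11.3). A quick way to inspect them, as a sketch assuming the same Hub id as above:

from transformers import AutoConfig

# Assumed Hub repository id, as above.
config = AutoConfig.from_pretrained("Helsinki-NLP/opus-tatoeba-en-ja")

# Values expected from the diff above (requires a transformers release that knows these fields).
print(config.decoder_vocab_size)                # 65001
print(config.share_encoder_decoder_embeddings)  # True
print(config.vocab_size)                        # 65001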
tf_model.h5 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ee16721c007cdc85a504b0f92409d4da8738e336bb50b4e67d27d1c9a448303e
+size 312580600
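The tf_model.h5 entry above is a Git LFS pointer rather than the weights themselves; the actual ~312 MB file is resolved at download time. A sketch for checking that a locally downloaded copy matches the pointer's oid and size (the local path is a placeholder):

import hashlib

path = "tf_model.h5"  # placeholder: wherever the downloaded weights live
expected_oid = "ee16721c007cdc85a504b0f92409d4da8738e336bb50b4e67d27d1c9a448303e"
expected_size = 312580600

sha256 = hashlib.sha256()
size = 0
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)
        size += len(chunk)

assert size == expected_size, f"size mismatch: {size} != {expected_size}"
assert sha256.hexdigest() == expected_oid, "sha256 mismatch"
print("tf_model.h5 matches the LFS pointer")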