NeMo
nvidia
jiaqiz committed on
Commit 785388f
1 Parent(s): 7bbd3fd

Add files using large-upload tool

Files changed (26)
  1. .gitattributes +7 -0
  2. model_weights/model.decoder.layers.mlp.linear_fc1.weight/12.1.0 +3 -0
  3. model_weights/model.decoder.layers.mlp.linear_fc1.weight/42.4.0 +3 -0
  4. model_weights/model.decoder.layers.mlp.linear_fc1.weight/43.6.0 +3 -0
  5. model_weights/model.decoder.layers.mlp.linear_fc1.weight/55.1.0 +3 -0
  6. model_weights/model.decoder.layers.mlp.linear_fc1.weight/6.0.0 +3 -0
  7. model_weights/model.decoder.layers.mlp.linear_fc1.weight/73.1.0 +3 -0
  8. model_weights/model.decoder.layers.mlp.linear_fc1.weight/81.7.0 +3 -0
  9. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_11_96.pt +3 -0
  10. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_14_96.pt +3 -0
  11. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_16_96.pt +3 -0
  12. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_17_96.pt +3 -0
  13. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_23_96.pt +3 -0
  14. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_29_96.pt +3 -0
  15. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_39_96.pt +3 -0
  16. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_52_96.pt +3 -0
  17. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_54_96.pt +3 -0
  18. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_69_96.pt +3 -0
  19. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_6_96.pt +3 -0
  20. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_71_96.pt +3 -0
  21. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_75_96.pt +3 -0
  22. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_85_96.pt +3 -0
  23. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_88_96.pt +3 -0
  24. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_8_96.pt +3 -0
  25. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_90_96.pt +3 -0
  26. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_9_96.pt +3 -0
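
The shards in this commit were pushed with a bulk-upload flow ("Add files using large-upload tool"). As a rough sketch only, an upload like this can be reproduced with huggingface_hub's upload_large_folder; the repo id and local path below are placeholders, and treating this specific API as the tool behind the commit is an assumption:

# Sketch, assuming huggingface_hub >= 0.25 (which provides upload_large_folder).
# repo_id and folder_path are placeholders, not taken from this commit.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token saved by huggingface-cli login
api.upload_large_folder(
    repo_id="nvidia/NeMo",           # placeholder target repository
    repo_type="model",
    folder_path="./checkpoint_dir",  # local folder containing model_weights/
)

upload_large_folder uploads the folder in many small commits and can resume after interruptions, which would explain why a large checkpoint lands on the Hub as a series of commits like this one.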
.gitattributes CHANGED
@@ -2287,3 +2287,10 @@ model_weights/model.decoder.layers.mlp.linear_fc1.weight/47.2.0 filter=lfs diff=
  model_weights/model.decoder.layers.mlp.linear_fc1.weight/30.3.0 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.mlp.linear_fc1.weight/92.5.0 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.mlp.linear_fc1.weight/27.7.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.mlp.linear_fc1.weight/55.1.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.mlp.linear_fc1.weight/42.4.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.mlp.linear_fc1.weight/81.7.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.mlp.linear_fc1.weight/43.6.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.mlp.linear_fc1.weight/12.1.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.mlp.linear_fc1.weight/6.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.mlp.linear_fc1.weight/73.1.0 filter=lfs diff=lfs merge=lfs -text
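
Each added line marks one new weight shard as Git LFS-tracked: filter=lfs diff=lfs merge=lfs routes the file through the LFS clean/smudge filters so only a small pointer is stored in Git, and -text disables text normalization for the binary payload. Lines of this form are typically written by running git lfs track on the path before committing.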
model_weights/model.decoder.layers.mlp.linear_fc1.weight/12.1.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1660a3fca26c65fc6836e58a06dda55294869640c949deeb4076f0931f08e121
+ size 339738624
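
Every file added in this commit is stored as a Git LFS pointer like the one above: a spec version line, the sha256 of the real payload, and its size in bytes (roughly 340 MB per linear_fc1 weight shard and about 1.8 KB per _extra_state shard). Below is a minimal sketch of checking a downloaded shard against its pointer; the paths are hypothetical, and only the pointer layout itself comes from the git-lfs v1 spec shown here:

# Sketch: verify a downloaded shard against its Git LFS pointer file.
# Paths are placeholders; the pointer layout matches the git-lfs v1 spec above.
import hashlib
import os

def read_pointer(pointer_path):
    # Parse the "version ...", "oid sha256:<hex>" and "size <bytes>" lines.
    fields = {}
    with open(pointer_path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields["oid"].split(":", 1)[1], int(fields["size"])

def verify(shard_path, pointer_path):
    expected_oid, expected_size = read_pointer(pointer_path)
    if os.path.getsize(shard_path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(shard_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

# Hypothetical usage:
# verify("model_weights/model.decoder.layers.mlp.linear_fc1.weight/12.1.0",
#        "pointers/12.1.0")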
model_weights/model.decoder.layers.mlp.linear_fc1.weight/42.4.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1c72561dfa10d373a1d7c58dfb9db09ec14698ca7d7589de281b55e94fd4156b
+ size 339738624
model_weights/model.decoder.layers.mlp.linear_fc1.weight/43.6.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c249e7a31e6657a1c3899fa4369771e34f31c49f64d218fd6b94fe76f5bb589
+ size 339738624
model_weights/model.decoder.layers.mlp.linear_fc1.weight/55.1.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c5d780f94379deb0c9abfa1bacc97a8d7e43c92c66cba11d8160b6820bb7b5d4
+ size 339738624
model_weights/model.decoder.layers.mlp.linear_fc1.weight/6.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:222ab8b30c1279e363f586cfe8d990b4316017258134368ff84cad60efb779b7
+ size 339738624
model_weights/model.decoder.layers.mlp.linear_fc1.weight/73.1.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4b59959d94aa29b27f60ca8b736ac3dfe81d656d36884787c292dfc8cf9964ab
+ size 339738624
model_weights/model.decoder.layers.mlp.linear_fc1.weight/81.7.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:85251bfeed0507f3d97aff69442d75bae2923192824e744c4115687637a7bf57
+ size 339738624
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_11_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4c16ab7e975ec1683fb246b8846d5e3522e81110e097d049d6e7e47968dd0def
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_14_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9411f9dce48a1d3a9696ced7800e3e952fd83beecc0f2b6789ead864be85342e
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_16_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:56380ee5d0a36253c51b016e2a2a6647ebf73727236776bd0107e421f8859f4a
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_17_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:abcbc429045a564b70b5b8e6a3aebd65d5c9033d0851500eb76c099f47d78a95
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_23_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86d70087fe5bd0019d9847df5a138621415c0f577e4aa73ffa746494c0a51c33
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_29_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:93a02cbdcf51abcc861138a79d97d339de3a73e2f095bcdb91b275a69d84e44b
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_39_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3b109bc001c35c681140f7729d952690a2e524e10f7f31b66473cd9368535724
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_52_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4da794ebf6d3e382c4d891c6de84821e639d5e3913acd3ce4ce2aabda4a0be5e
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_54_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8863123f96f55dd3b4087785be202cb5ff411f5c651d8ac8e0442c34e7c35437
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_69_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3693816819b2ae665b43c0df95814b66c5d56af7f1fd8b5870887118b2ce0bf9
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_6_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d4dc039da97daf76c738a1e6cadbe5f420e5ae9813fff7d52383f7a8a6607da5
+ size 1836
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_71_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dc22fd1bd6a75ad5bf28b3d839e81fa367a5ade592e6d13686255d9e7638997f
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_75_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af07f6ddc756fcb79a4871a38bab569ed54f5c8e92d27ab935c1719ec6d9ad7b
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_85_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fc4edfcb2c11792b55ddf7c83de62c3bb26d09db3990f5e703c0a864bf1d9635
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_88_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:882291292bd8fa87f3cc5efb0b518fb98a28f78d991be5774119ecaaa2bd41b7
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_8_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:79b05b5e7449cacc1b575cb6f9a5a09f210e53f64d2764a59ae613b6629c466c
+ size 1836
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_90_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1784707fcd135c45e6816dddceadc9820f25df2fb07797c61727ad5897929465
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_9_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aee04ee3a9964d7e6d305e5c1ad233bf6ef32b34178cda7bf7c2083c011e4e56
+ size 1836