nvidia/NeMo
jiaqiz committed
Commit 9a6c46b
1 Parent(s): cbd5bac

Add files using large-upload tool
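For reference, a minimal sketch of how a sharded checkpoint folder like this can be pushed to the Hub in resumable chunks. It assumes huggingface_hub with the upload_large_folder API (0.25+) and a prior `huggingface-cli login`; the repo id and local path are illustrative, not taken from this commit.

    # Sketch: push a large, sharded checkpoint directory to the Hub (assumption: upload_large_folder is available).
    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_large_folder(
        repo_id="nvidia/NeMo",        # illustrative repo id
        repo_type="model",
        folder_path="model_weights",  # local directory containing the shard_*.pt files
    )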

Files changed (25)
  1. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_25_96.pt +3 -0
  2. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_41_96.pt +3 -0
  3. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_48_96.pt +3 -0
  4. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_58_96.pt +3 -0
  5. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_11_96.pt +3 -0
  6. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_14_96.pt +3 -0
  7. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_16_96.pt +3 -0
  8. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_17_96.pt +3 -0
  9. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_21_96.pt +3 -0
  10. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_23_96.pt +3 -0
  11. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_29_96.pt +3 -0
  12. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_32_96.pt +3 -0
  13. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_39_96.pt +3 -0
  14. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_3_96.pt +3 -0
  15. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_52_96.pt +3 -0
  16. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_54_96.pt +3 -0
  17. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_69_96.pt +3 -0
  18. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_6_96.pt +3 -0
  19. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_71_96.pt +3 -0
  20. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_75_96.pt +3 -0
  21. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_85_96.pt +3 -0
  22. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_88_96.pt +3 -0
  23. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_8_96.pt +3 -0
  24. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_90_96.pt +3 -0
  25. model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_9_96.pt +3 -0
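Each entry above is a Git LFS pointer file: the repository stores only a small text stub (spec version, sha256 oid, byte size) while the actual shard bytes live in LFS storage. As a quick illustration, the sketch below (hypothetical paths, Python 3.9+) parses such a pointer and checks a downloaded blob against it; the pointer format matches the "+ version/oid/size" lines in the diffs that follow.

    # Sketch: verify a downloaded shard against its Git LFS pointer (oid sha256 + size).
    import hashlib

    def parse_lfs_pointer(pointer_path):
        fields = {}
        with open(pointer_path) as f:
            for line in f:
                key, _, value = line.strip().partition(" ")
                fields[key] = value
        return fields  # expects keys: "version", "oid", "size"

    def verify(blob_path, pointer_path):
        fields = parse_lfs_pointer(pointer_path)
        data = open(blob_path, "rb").read()
        assert len(data) == int(fields["size"]), "size mismatch"
        assert hashlib.sha256(data).hexdigest() == fields["oid"].removeprefix("sha256:"), "hash mismatch"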
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_25_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d49953deb85f273d8951db8614f12bb6a2df7dfa96b9ae426dad74a9e8460eed
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_41_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:214b5869ba1179cdceeff980f2cf65ca7ab650c06b38d2f93b625f2f3a380c58
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_48_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fb4e3686edebeef34059ccb1a24181f803157ce2fe1ad32327842e52f332f016
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_58_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:73b9368e11a3afd56aa6c6026d2f9e43ee3e2e7c02c20b390fba72ba95085eca
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_11_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4c16ab7e975ec1683fb246b8846d5e3522e81110e097d049d6e7e47968dd0def
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_14_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9411f9dce48a1d3a9696ced7800e3e952fd83beecc0f2b6789ead864be85342e
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_16_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:56380ee5d0a36253c51b016e2a2a6647ebf73727236776bd0107e421f8859f4a
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_17_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:abcbc429045a564b70b5b8e6a3aebd65d5c9033d0851500eb76c099f47d78a95
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_21_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ba9cf0aa9d4ba429fca6ef4999cab7c3f23988bd11cb2d64d61d0ff667cf45ba
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_23_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86d70087fe5bd0019d9847df5a138621415c0f577e4aa73ffa746494c0a51c33
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_29_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:93a02cbdcf51abcc861138a79d97d339de3a73e2f095bcdb91b275a69d84e44b
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_32_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f3ec2b18d724a72b6d794099f2440a62e2a76b0b343eb8562c7307923609080b
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_39_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3b109bc001c35c681140f7729d952690a2e524e10f7f31b66473cd9368535724
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_3_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:728bd75592958705378d69f35359d640cda1f742ee5dc7197edc708d86e458b3
+ size 1836
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_52_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4da794ebf6d3e382c4d891c6de84821e639d5e3913acd3ce4ce2aabda4a0be5e
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_54_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8863123f96f55dd3b4087785be202cb5ff411f5c651d8ac8e0442c34e7c35437
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_69_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3693816819b2ae665b43c0df95814b66c5d56af7f1fd8b5870887118b2ce0bf9
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_6_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d4dc039da97daf76c738a1e6cadbe5f420e5ae9813fff7d52383f7a8a6607da5
+ size 1836
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_71_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dc22fd1bd6a75ad5bf28b3d839e81fa367a5ade592e6d13686255d9e7638997f
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_75_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af07f6ddc756fcb79a4871a38bab569ed54f5c8e92d27ab935c1719ec6d9ad7b
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_85_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fc4edfcb2c11792b55ddf7c83de62c3bb26d09db3990f5e703c0a864bf1d9635
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_88_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:882291292bd8fa87f3cc5efb0b518fb98a28f78d991be5774119ecaaa2bd41b7
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_8_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:79b05b5e7449cacc1b575cb6f9a5a09f210e53f64d2764a59ae613b6629c466c
+ size 1836
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_90_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1784707fcd135c45e6816dddceadc9820f25df2fb07797c61727ad5897929465
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_qkv._extra_state/shard_9_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aee04ee3a9964d7e6d305e5c1ad233bf6ef32b34178cda7bf7c2083c011e4e56
+ size 1836