NeMo
nvidia
jiaqiz committed
Commit c75713a
1 Parent(s): 785388f

Add files using large-upload tool

Files changed (25)
  1. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_0_96.pt +3 -0
  2. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_21_96.pt +3 -0
  3. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_24_96.pt +3 -0
  4. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_2_96.pt +3 -0
  5. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_30_96.pt +3 -0
  6. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_32_96.pt +3 -0
  7. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_34_96.pt +3 -0
  8. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_35_96.pt +3 -0
  9. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_38_96.pt +3 -0
  10. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_3_96.pt +3 -0
  11. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_40_96.pt +3 -0
  12. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_42_96.pt +3 -0
  13. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_46_96.pt +3 -0
  14. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_50_96.pt +3 -0
  15. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_53_96.pt +3 -0
  16. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_57_96.pt +3 -0
  17. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_60_96.pt +3 -0
  18. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_64_96.pt +3 -0
  19. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_72_96.pt +3 -0
  20. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_7_96.pt +3 -0
  21. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_83_96.pt +3 -0
  22. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_84_96.pt +3 -0
  23. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_92_96.pt +3 -0
  24. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_94_96.pt +3 -0
  25. model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_95_96.pt +3 -0
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_0_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cdc3d2b4de551828617fd47b96ac7c52e318644919c6e5971e59dd5af2e8eb76
+ size 1836
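Each shard added in this commit is stored as a Git LFS pointer like the one above rather than the tensor data itself: a three-line text file recording the spec version, the SHA-256 of the real blob, and its size in bytes. As a rough illustration only (not part of this repository; the file names and paths below are hypothetical, and the pointer file on disk has no leading `+` diff markers), such a pointer could be parsed and a locally fetched blob checked against it:

```python
import hashlib
from pathlib import Path


def parse_lfs_pointer(pointer_path: str) -> dict:
    """Parse the 'key value' lines of a Git LFS pointer file."""
    fields = {}
    for line in Path(pointer_path).read_text().splitlines():
        if not line.strip():
            continue
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields


def verify_blob(pointer_path: str, blob_path: str) -> bool:
    """Check a downloaded blob against the oid/size recorded in its pointer."""
    fields = parse_lfs_pointer(pointer_path)
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])
    data = Path(blob_path).read_bytes()
    return (len(data) == expected_size
            and hashlib.sha256(data).hexdigest() == expected_oid)


# Hypothetical usage: pointer file from the repo tree, blob fetched via `git lfs pull`.
# print(verify_blob("shard_0_96.pt.pointer", "shard_0_96.pt"))
```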
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_21_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ba9cf0aa9d4ba429fca6ef4999cab7c3f23988bd11cb2d64d61d0ff667cf45ba
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_24_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5179bed2b5c6e388ef22488356cbfa2ad38fad93d26689f5d1931fb21a15f18
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_2_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d577e00499f440dc15d0548a709f723869bdf605e0545cac77c538f86be47d79
+ size 1836
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_30_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:91ceb57c7deb525d11d475ca2d983ac7fa9bdc652d3767e4466a7d2375007a37
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_32_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f3ec2b18d724a72b6d794099f2440a62e2a76b0b343eb8562c7307923609080b
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_34_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d91f28f0611694f24e05103be174094e61828d3965d93218f534e54088c98474
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_35_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:71fdf56e296aed9d8bfc5206799e1d1ac43f582d43ed30f515dbab40112ba258
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_38_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ed893da9981fc3c965691baa95bcb6d308b355cad5c450adc7446a4fbe8cd9cb
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_3_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:728bd75592958705378d69f35359d640cda1f742ee5dc7197edc708d86e458b3
+ size 1836
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_40_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:868c6b9c56fc7d84c93ebc415f8033e3f452a9128c71ead043e86cc7922a09fa
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_42_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c1d4594227e34abc31864fc5cb5c5bb9de50e213249edc7d1bbee867b43cef4d
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_46_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ad789da70262159664d4eb45ff65671036d3e6b0987e55bb654956053176e08c
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_50_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:589423c313c3b3a1596305bef910e00c563ef1fcc0305777be0d8226cd391fdf
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_53_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2f3cba55163f9cb2eb4ef8ef5e6bed32df6701f2ee0ecf5e9e8768eece137ceb
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_57_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b7f4da70f660e067b374328723d75babc567a6cd4a3a1f223d73775e6c89b800
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_60_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ae1b8b2fa8a86bf8650543a16b5ce2fedfe5225dbb442fcc74bbdc8d23365d42
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_64_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b2ec57d9b44038ebea927087321c80b4dfab6365b9485fc4d4d165431a5aea47
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_72_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ce281fff94d3e8b57565271ccec2ee98d6e8f6ce97f487d6c4d83859db078079
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_7_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fc81ecc1aa4cb51e01abc074e013df21f411e0036c1e63d587bfbd62f30ff52c
+ size 1836
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_83_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b9afd69b274ff9b5d4b11ca94ecbd70d53374ca5d0adcc8f752ab4eebe2092be
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_84_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:34365f11d7adccafc1361ec686a1f1fd8089864a95f0cd952986437aef82b8e3
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_92_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:67846f3f0937c288adde34ffff89074cb52bf3d754c97aa8e129de0a0eaa33fd
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_94_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9a15a899861ca2803444e551521376d87cbe33c95038243d97b7e65c2e7ce21c
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj._extra_state/shard_95_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ed9b0e88ea003e4694006347597bbf59fbaec88fec35807929aadf816051d8e
+ size 1840