nvidia / NeMo
jiaqiz committed
Commit 6ca92f2
1 Parent(s): 0cf3249

Add files using large-upload tool
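The commit message does not name the "large-upload tool" that produced this commit, so the snippet below is a hedged illustration only: one way such a many-file checkpoint folder can be pushed today is huggingface_hub's upload_large_folder helper. The repo id and local path are placeholders, not values taken from this commit.

    # Hypothetical sketch, not the tool actually used for this commit.
    # upload_large_folder resumably uploads a directory with many large files,
    # letting Git LFS pointers be created for the tracked weight shards.
    from huggingface_hub import HfApi

    api = HfApi()  # assumes a Hugging Face token is available in the environment
    api.upload_large_folder(
        repo_id="nvidia/NeMo-example",   # placeholder repository id
        repo_type="model",
        folder_path="./model_weights",   # placeholder local checkpoint directory
    )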

Files changed (26)
  1. .gitattributes +25 -0
  2. model_weights/model.decoder.layers.self_attention.linear_proj.weight/0.0.5 +3 -0
  3. model_weights/model.decoder.layers.self_attention.linear_proj.weight/1.0.3 +3 -0
  4. model_weights/model.decoder.layers.self_attention.linear_proj.weight/12.0.4 +3 -0
  5. model_weights/model.decoder.layers.self_attention.linear_proj.weight/13.0.2 +3 -0
  6. model_weights/model.decoder.layers.self_attention.linear_proj.weight/2.0.4 +3 -0
  7. model_weights/model.decoder.layers.self_attention.linear_proj.weight/22.0.2 +3 -0
  8. model_weights/model.decoder.layers.self_attention.linear_proj.weight/23.0.3 +3 -0
  9. model_weights/model.decoder.layers.self_attention.linear_proj.weight/24.0.0 +3 -0
  10. model_weights/model.decoder.layers.self_attention.linear_proj.weight/29.0.3 +3 -0
  11. model_weights/model.decoder.layers.self_attention.linear_proj.weight/34.0.1 +3 -0
  12. model_weights/model.decoder.layers.self_attention.linear_proj.weight/37.0.3 +3 -0
  13. model_weights/model.decoder.layers.self_attention.linear_proj.weight/38.0.6 +3 -0
  14. model_weights/model.decoder.layers.self_attention.linear_proj.weight/40.0.7 +3 -0
  15. model_weights/model.decoder.layers.self_attention.linear_proj.weight/46.0.4 +3 -0
  16. model_weights/model.decoder.layers.self_attention.linear_proj.weight/48.0.3 +3 -0
  17. model_weights/model.decoder.layers.self_attention.linear_proj.weight/54.0.3 +3 -0
  18. model_weights/model.decoder.layers.self_attention.linear_proj.weight/56.0.3 +3 -0
  19. model_weights/model.decoder.layers.self_attention.linear_proj.weight/56.0.6 +3 -0
  20. model_weights/model.decoder.layers.self_attention.linear_proj.weight/57.0.1 +3 -0
  21. model_weights/model.decoder.layers.self_attention.linear_proj.weight/66.0.3 +3 -0
  22. model_weights/model.decoder.layers.self_attention.linear_proj.weight/82.0.6 +3 -0
  23. model_weights/model.decoder.layers.self_attention.linear_proj.weight/83.0.1 +3 -0
  24. model_weights/model.decoder.layers.self_attention.linear_proj.weight/84.0.4 +3 -0
  25. model_weights/model.decoder.layers.self_attention.linear_proj.weight/93.0.1 +3 -0
  26. model_weights/model.decoder.layers.self_attention.linear_proj.weight/94.0.4 +3 -0
.gitattributes CHANGED
@@ -2895,3 +2895,28 @@ model_weights/model.decoder.layers.self_attention.linear_proj.weight/73.0.2 filt
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/29.0.1 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/55.0.4 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/23.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/37.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/22.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/56.0.6 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/13.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/54.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/40.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/66.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/38.0.6 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/94.0.4 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/34.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/48.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/83.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/23.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/24.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/1.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/57.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/84.0.4 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/2.0.4 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/93.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/12.0.4 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/82.0.6 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/56.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/0.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/29.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/46.0.4 filter=lfs diff=lfs merge=lfs -text
model_weights/model.decoder.layers.self_attention.linear_proj.weight/0.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b3edfc5d3685bcb535809a11c253f636f6ef595df4424aafa43b3c98f986868b
+ size 84934656
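Each weight shard added in this commit is stored as a Git LFS pointer like the one above: a three-line text stub recording the spec version, the sha256 object id, and the byte size of the actual tensor file (here 84934656 bytes, exactly 81 MiB per shard). A minimal sketch of reading such a pointer and checking a downloaded blob against it, using only the standard library; the file paths in the example are placeholders:

    # Minimal sketch: parse a Git LFS pointer file and verify a downloaded blob
    # against the sha256 oid and size it records. Paths are placeholders.
    import hashlib
    from pathlib import Path

    def read_lfs_pointer(pointer_path):
        """Return the version, oid, and size fields of an LFS pointer file."""
        fields = {}
        for line in Path(pointer_path).read_text().splitlines():
            if line.strip():
                key, value = line.split(" ", 1)
                fields[key] = value
        return fields

    def verify_blob(pointer_path, blob_path):
        """Check that blob_path matches the oid and size recorded in the pointer."""
        fields = read_lfs_pointer(pointer_path)
        expected_oid = fields["oid"].removeprefix("sha256:")
        expected_size = int(fields["size"])
        data = Path(blob_path).read_bytes()
        return len(data) == expected_size and hashlib.sha256(data).hexdigest() == expected_oid

    # Example (placeholder paths):
    # verify_blob("model_weights/.../0.0.5", "downloads/0.0.5")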
model_weights/model.decoder.layers.self_attention.linear_proj.weight/1.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9165933d341771082b8adc588971f4da814ff8d5eed992da724f18e10ce74515
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/12.0.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cf4489a28243f5b9863b06711cf645666508515e21179afd0bcfe19f2d87f52a
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/13.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6148de9b24eb03c48e0a7650e49b0d18ff3f9259571d5e0cfeaea631b5e6a51f
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/2.0.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3a9a99ecd01662dc7f08e5dba4ee75b228ac32b2f8443052c226ad1ecc5446e8
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/22.0.2 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b8c74a5c26ef30887f52f51de63010111b50e21f62302cfed809727e78192146
3
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/23.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:99f23efe0b513da5470053444e2f2cb7823d5af17e8ffc954abe2922d01f578c
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/24.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:319aad21be9d3374825a50d07279da8288472e6dfc42400f36572fec6d8a6227
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/29.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:665b9fb44c1964238372590c1ec65feb9520e9b83fa08cf4af6fe0ce9e7ac11b
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/34.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4f84f81104c0cdea661934bd752926cea6ba51a89ced2336eedb64825311abc8
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/37.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af9e60fa3584195bdfa06d14af164910c711794d0ec76861f3af1a6b58e71ed2
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/38.0.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e8b359a4e22853f12a4115e1c92d9352bf500d8ad042c8163855af9b7477ff08
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/40.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e84972543a4ccd77754ab313803bc61c502e39bd4e6a8b25d17eedba581c7e4c
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/46.0.4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:12f609dda65452d163ac9595c9dbfb4e939b910e384a8d3c3191d63b219a4da0
3
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/48.0.3 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:44510d02d68e59d0574787b6825153db75696cee0c206ea7ada5ae094878a2c3
3
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/54.0.3 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e6b3b52fd0faf9fe20a7391dbdb070c6ecc048ab52cc6314a9526781bcc4073d
3
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/56.0.3 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:aa6b31bd615ae02d37852bf15fe3ab8a1d2ab2400620b6cde5ca89e3e043a3ce
3
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/56.0.6 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d29f21ad7c4ad1ea3c8ae52a947ef0dca1271811c033b542ddbfd5cb87890281
3
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/57.0.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:17c0fb1c600e3a9b45927f0369096edf8166e5a2283f2a0441577be412944fcf
3
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/66.0.3 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:767fdcd66e648391f6100afbc1960b36ec30d31ef8fe90837ad99b3963234a57
3
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/82.0.6 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c2137cc8ca13cfff52713687aa27c17eeb45f78ac28eb2c17adb1b39b9842c25
3
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/83.0.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:194884566175e9a779b57deb31590ca2f6062e8beb0ae98e52f048b89e1be9e8
3
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/84.0.4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d3aabb06691914c0d5fee7df45148b4cb06c0498f903f36f516f886fef2999ef
3
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/93.0.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0b93d4f7c3f0f07fd2080429d5daba594fd63be78b5af744eb0f32a384fe29bb
3
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/94.0.4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f15a420260e261269820d011c0b58c835c16803a4eca204e0970a5993e34c877
3
+ size 84934656