nvidia / NeMo
jiaqiz committed
Commit 6053292
1 Parent(s): 91d516f

Add files using large-upload tool
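
The "large-upload tool" named in this commit message most likely refers to the large-folder upload workflow in huggingface_hub (HfApi.upload_large_folder, also exposed as `huggingface-cli upload-large-folder`), which pushes a directory in resumable chunks and stores large binaries such as these weight shards through Git LFS. A minimal sketch of that workflow, assuming that tool is what produced this commit; the repo id and local path are placeholders, not taken from the commit:

from huggingface_hub import HfApi

api = HfApi()  # picks up the token saved by `huggingface-cli login`

# Upload a local checkpoint directory in small, resumable commits.
# Both arguments below are illustrative placeholders.
api.upload_large_folder(
    repo_id="nvidia/NeMo",           # placeholder target repository
    repo_type="model",
    folder_path="./model_weights",   # placeholder local folder holding the shards
)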

Files changed (26)
  1. .gitattributes +25 -0
  2. model_weights/model.decoder.layers.mlp.linear_fc1.weight/19.7.0 +3 -0
  3. model_weights/model.decoder.layers.mlp.linear_fc1.weight/21.5.0 +3 -0
  4. model_weights/model.decoder.layers.mlp.linear_fc1.weight/37.2.0 +3 -0
  5. model_weights/model.decoder.layers.mlp.linear_fc1.weight/4.6.0 +3 -0
  6. model_weights/model.decoder.layers.mlp.linear_fc1.weight/45.2.0 +3 -0
  7. model_weights/model.decoder.layers.self_attention.linear_proj.weight/0.0.2 +3 -0
  8. model_weights/model.decoder.layers.self_attention.linear_proj.weight/22.0.5 +3 -0
  9. model_weights/model.decoder.layers.self_attention.linear_proj.weight/23.0.2 +3 -0
  10. model_weights/model.decoder.layers.self_attention.linear_proj.weight/26.0.1 +3 -0
  11. model_weights/model.decoder.layers.self_attention.linear_proj.weight/32.0.0 +3 -0
  12. model_weights/model.decoder.layers.self_attention.linear_proj.weight/34.0.4 +3 -0
  13. model_weights/model.decoder.layers.self_attention.linear_proj.weight/36.0.2 +3 -0
  14. model_weights/model.decoder.layers.self_attention.linear_proj.weight/44.0.0 +3 -0
  15. model_weights/model.decoder.layers.self_attention.linear_proj.weight/52.0.2 +3 -0
  16. model_weights/model.decoder.layers.self_attention.linear_proj.weight/58.0.0 +3 -0
  17. model_weights/model.decoder.layers.self_attention.linear_proj.weight/6.0.5 +3 -0
  18. model_weights/model.decoder.layers.self_attention.linear_proj.weight/65.0.2 +3 -0
  19. model_weights/model.decoder.layers.self_attention.linear_proj.weight/66.0.7 +3 -0
  20. model_weights/model.decoder.layers.self_attention.linear_proj.weight/69.0.2 +3 -0
  21. model_weights/model.decoder.layers.self_attention.linear_proj.weight/80.0.1 +3 -0
  22. model_weights/model.decoder.layers.self_attention.linear_proj.weight/82.0.0 +3 -0
  23. model_weights/model.decoder.layers.self_attention.linear_proj.weight/85.0.5 +3 -0
  24. model_weights/model.decoder.layers.self_attention.linear_proj.weight/87.0.1 +3 -0
  25. model_weights/model.decoder.layers.self_attention.linear_proj.weight/94.0.7 +3 -0
  26. model_weights/model.decoder.layers.self_attention.linear_proj.weight/95.0.3 +3 -0
.gitattributes CHANGED
@@ -2320,3 +2320,28 @@ model_weights/model.decoder.layers.mlp.linear_fc1.weight/1.1.0 filter=lfs diff=l
  model_weights/model.decoder.layers.mlp.linear_fc1.weight/33.1.0 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/37.0.2 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/1.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/34.0.4 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/44.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/32.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/66.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/52.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/0.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.mlp.linear_fc1.weight/37.2.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/22.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/58.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/82.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.mlp.linear_fc1.weight/4.6.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/80.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.mlp.linear_fc1.weight/21.5.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/85.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/65.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.mlp.linear_fc1.weight/45.2.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/23.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/95.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/87.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/36.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.mlp.linear_fc1.weight/19.7.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/26.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/6.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/94.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/69.0.2 filter=lfs diff=lfs merge=lfs -text
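
Every file added in this commit is tracked by Git LFS, so what actually lands in the Git history for each weight shard is the three-line pointer stub shown below (version, oid sha256, size) rather than the tensor data itself; the real shards live in LFS storage and are fetched on checkout or download. As a small illustration, a raw pointer stub like these could be read with a short Python helper; the function name and example path are hypothetical, and this only applies to the stub as stored in Git, not to a working copy where LFS has already replaced it with the binary file.

from pathlib import Path

def parse_lfs_pointer(path):
    # Parse a Git LFS pointer stub into a dict of its key/value lines,
    # e.g. {"version": "https://git-lfs.github.com/spec/v1",
    #       "oid": "sha256:...", "size": "339738624"}.
    fields = {}
    for line in Path(path).read_text().splitlines():
        if not line.strip():
            continue
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

# Hypothetical usage against one of the stubs below:
# info = parse_lfs_pointer("model_weights/model.decoder.layers.mlp.linear_fc1.weight/19.7.0")
# int(info["size"])  ->  339738624 bytes for that shard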
model_weights/model.decoder.layers.mlp.linear_fc1.weight/19.7.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dfa2c3980070a4d487be55f2a754f56867ebc5727a1143c25dc72daee7aca257
+ size 339738624
model_weights/model.decoder.layers.mlp.linear_fc1.weight/21.5.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ace92fa8a058e9b41cc857cf9e7f4d6cd90f973b15c6cd4f668586e2f628c382
+ size 339738624
model_weights/model.decoder.layers.mlp.linear_fc1.weight/37.2.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:67df1328ef4e43ae1f5658378678e53f14c76a53c7ac147c81b64ea3bb54e96e
+ size 339738624
model_weights/model.decoder.layers.mlp.linear_fc1.weight/4.6.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f07876fe14b3d6715e78daa4dcc8326cf89e6f9d12f98b0174a6ff77f7881cc6
+ size 339738624
model_weights/model.decoder.layers.mlp.linear_fc1.weight/45.2.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6f295c2b7ddbca34904bc4baa7a7f7b8ab92b41c7d62c8e6a34a02453d517798
+ size 339738624
model_weights/model.decoder.layers.self_attention.linear_proj.weight/0.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:72e9c24522707be170eb9639aed84464ff4b1f9a11da58533e2e1e63dc6b65f6
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/22.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:164021c2f30d9c398131306ea0884162e3cef1df262cdc59cadcf8f3b28633c3
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/23.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:569b8dae3cf836e28b6275688472b03977a7eec720364cae3e046f6fa9dfb239
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/26.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2669e84ca2c5d6ebdc048d84fd79898d7ca8cca78ed1cb9df92d757d52bed3e1
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/32.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eb146fef8c19258ff722afd964f97e098e2579e99519ee3d996edca1fd5a0d64
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/34.0.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:81e5e00ce249ee8c0f11dcfbccda6877425cf52cfd4c7a6423c5ca46d9dbc5fd
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/36.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d865abaae62e034f9b6185081e907726b49b0af786aa1689f3744428b494c64f
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/44.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c8bae70fceac10c8f4f7b7da09a6f181d8684639520cee53a11f923b18af6b41
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/52.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:73813956d97082568f1fd27e0b278598c38f1f88d72dc38295eec2fb7065db5d
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/58.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:95d860ad4f718f9e83fd9e5e13f371b21684a1ef772176fa0d24f22eb7a4d5ab
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/6.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:22dd9cd8eb9afcae35c53296ee56ce75fdfe3c33d80dda43029e38d980d6fd3a
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/65.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c0b262c15a5ebd74c1f70966836fb2d526266f471a5d153aacf11b5e90b71540
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/66.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e859137d8f824bf8239f572baf35bc8a7d3edf525d5ca55bfc38baf079baa67
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/69.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aaca897dcdbeea27e269f02a81c199591b61250a3408a108f52726c078850377
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/80.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c3e021af4e6f1c58eca51e9653afd2555e0d597a73389bb31fdcd169fe0186a1
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/82.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ddd43bfeb6e2a7fdad3715e610f2b27cf3562bd19bea6c2181635794b14f006
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/85.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:40ed25b636bb9c43293153523d9ef1e6d246111cb26adcc6bb1aaf55ef4250f8
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/87.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8d118fef35e12dcdbdbc1d11abe8b9bc0a21ba566b239e9ad0a6c4aaaf5c58fe
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/94.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:407127d90cb076e3e7bf8b62138fed2ca3c2dca4f9cc17cd169b9a6797e31cec
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/95.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:abbf88ae57f5dbf99739611f6fdf1a222911868810a3e4250137f3bcea8bf472
+ size 84934656