NeMo
nvidia
jiaqiz committed
Commit adec83b
1 Parent(s): 39dc734

Add files using large-upload tool
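(Aside, not part of the commit: the "large-upload tool" presumably refers to the Hugging Face Hub's resumable large-folder upload path. A minimal sketch of such an upload with huggingface_hub's `HfApi.upload_large_folder` is below; the repo id and local folder path are placeholders, not values taken from this commit.)

```python
# Minimal sketch, assuming the "large-upload tool" maps to
# huggingface_hub's upload_large_folder; repo id and path are hypothetical.
from huggingface_hub import HfApi

api = HfApi()  # expects a token from `huggingface-cli login` or HF_TOKEN
api.upload_large_folder(
    repo_id="nvidia/your-model-repo",  # placeholder, not this repo's actual id
    repo_type="model",
    folder_path="./model_weights",     # local checkpoint directory to upload
)
```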

Files changed (26)
  1. .gitattributes +25 -0
  2. model_weights/model.decoder.layers.self_attention.linear_proj.weight/10.0.3 +3 -0
  3. model_weights/model.decoder.layers.self_attention.linear_proj.weight/16.0.0 +3 -0
  4. model_weights/model.decoder.layers.self_attention.linear_proj.weight/17.0.5 +3 -0
  5. model_weights/model.decoder.layers.self_attention.linear_proj.weight/18.0.0 +3 -0
  6. model_weights/model.decoder.layers.self_attention.linear_proj.weight/3.0.7 +3 -0
  7. model_weights/model.decoder.layers.self_attention.linear_proj.weight/33.0.1 +3 -0
  8. model_weights/model.decoder.layers.self_attention.linear_proj.weight/34.0.0 +3 -0
  9. model_weights/model.decoder.layers.self_attention.linear_proj.weight/35.0.2 +3 -0
  10. model_weights/model.decoder.layers.self_attention.linear_proj.weight/38.0.7 +3 -0
  11. model_weights/model.decoder.layers.self_attention.linear_proj.weight/4.0.3 +3 -0
  12. model_weights/model.decoder.layers.self_attention.linear_proj.weight/46.0.5 +3 -0
  13. model_weights/model.decoder.layers.self_attention.linear_proj.weight/51.0.1 +3 -0
  14. model_weights/model.decoder.layers.self_attention.linear_proj.weight/53.0.0 +3 -0
  15. model_weights/model.decoder.layers.self_attention.linear_proj.weight/53.0.1 +3 -0
  16. model_weights/model.decoder.layers.self_attention.linear_proj.weight/57.0.2 +3 -0
  17. model_weights/model.decoder.layers.self_attention.linear_proj.weight/58.0.7 +3 -0
  18. model_weights/model.decoder.layers.self_attention.linear_proj.weight/60.0.0 +3 -0
  19. model_weights/model.decoder.layers.self_attention.linear_proj.weight/60.0.1 +3 -0
  20. model_weights/model.decoder.layers.self_attention.linear_proj.weight/61.0.0 +3 -0
  21. model_weights/model.decoder.layers.self_attention.linear_proj.weight/68.0.0 +3 -0
  22. model_weights/model.decoder.layers.self_attention.linear_proj.weight/68.0.1 +3 -0
  23. model_weights/model.decoder.layers.self_attention.linear_proj.weight/70.0.5 +3 -0
  24. model_weights/model.decoder.layers.self_attention.linear_proj.weight/75.0.0 +3 -0
  25. model_weights/model.decoder.layers.self_attention.linear_proj.weight/77.0.3 +3 -0
  26. model_weights/model.decoder.layers.self_attention.linear_proj.weight/82.0.7 +3 -0
.gitattributes CHANGED
@@ -2445,3 +2445,28 @@ model_weights/model.decoder.layers.self_attention.linear_proj.weight/10.0.4 filt
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/72.0.0 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/80.0.6 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/39.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/60.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/82.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/16.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/18.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/53.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/4.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/46.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/58.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/35.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/70.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/53.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/60.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/68.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/38.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/61.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/33.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/77.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/75.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/10.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/17.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/34.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/51.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/57.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/3.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/68.0.0 filter=lfs diff=lfs merge=lfs -text
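(Aside, not part of the commit: each added .gitattributes entry above is an exact path marked with `filter=lfs diff=lfs merge=lfs -text`, so Git stores and diffs that shard through Git LFS rather than as a regular text blob. A small illustrative check that a given shard path is covered by such a rule, sketched in Python under those assumptions:)

```python
# Illustrative sketch: confirm a weight-shard path has an LFS rule of the
# exact-path form shown above; `.gitattributes` here has no glob patterns.
from pathlib import Path

def is_lfs_tracked(path: str, gitattributes: str = ".gitattributes") -> bool:
    """Return True if `path` appears with the filter=lfs attribute."""
    for line in Path(gitattributes).read_text(encoding="utf-8").splitlines():
        parts = line.split()
        if parts and parts[0] == path and "filter=lfs" in parts[1:]:
            return True
    return False

print(is_lfs_tracked(
    "model_weights/model.decoder.layers.self_attention.linear_proj.weight/68.0.0"
))
```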
model_weights/model.decoder.layers.self_attention.linear_proj.weight/10.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:769bcabf3af753bc03f072403fbbe97a976cd66fbd4631cb2aedf553a38116e2
+ size 84934656
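(Aside, not part of the commit: each "ADDED" file in this diff is a Git LFS pointer, not the tensor data itself: a spec version, a sha256 object id, and a byte size (84934656 bytes, exactly 81 MiB per shard). A hedged Python sketch for verifying a locally fetched shard against its pointer follows; the local file names are hypothetical.)

```python
# Sketch only: verify a locally downloaded shard against its LFS pointer.
import hashlib
import os
import re

def parse_pointer(pointer_path: str) -> tuple[str, int]:
    """Extract (sha256 hex digest, size in bytes) from a Git LFS pointer file."""
    text = open(pointer_path, encoding="utf-8").read()
    oid = re.search(r"oid sha256:([0-9a-f]{64})", text).group(1)
    size = int(re.search(r"size (\d+)", text).group(1))
    return oid, size

def verify(blob_path: str, pointer_path: str) -> bool:
    """Check that the blob's size and sha256 digest match the pointer."""
    oid, size = parse_pointer(pointer_path)
    if os.path.getsize(blob_path) != size:
        return False
    h = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == oid

# Hypothetical local paths for illustration only.
print(verify("10.0.3", "10.0.3.pointer"))
```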
model_weights/model.decoder.layers.self_attention.linear_proj.weight/16.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b03baf37aac5f6f0b8cf63d654c54ebbac2f8ef8060682b26d59103279151535
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/17.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ad98a972917663794c2e8e6bfef3697ecc4ef069962b2b91c0bb7a54a1fe67f0
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/18.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86d545b9fa46b82affc565e7d33f1634dedeb6ea345105e10da801166d4f0bc8
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/3.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8ffb0c9f48782ee0af41327338ba81a831525e940e08299998ab5a9d7d0889eb
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/33.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ee9039d57ff062867f78d9c724be32a96924b92fce6f246ee41e53fd40c9bfa2
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/34.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7d086bb53f60a663cb68ba6549f81eb4553a34e63d2aba5f3881bc815781fe7f
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/35.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e3050c1b4959ccaf3b96a0bcd6313dd5b0dbf920b127f3791110386b198a2012
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/38.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:53054225aaf50b73613906d503a25dacde699351e011a03c1db70033c359d4aa
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/4.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c8cecb766ffcf2fa81bdfffe88235dd681da1fd6605a811e5033308b6aef101
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/46.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cea123f8b5656388065d10b3e2a87e51913ed17210f865ef9eb065a90f244175
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/51.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c5c57d5d0e8142c0854afdff22f0ba5aad33911d49e1225d06f9ae48248fe70b
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/53.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:72cea27a54aa735f3c57ebeea914cab6873a11504d0af899f9d3de998b284192
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/53.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ae1b3bd46dc28bddaca4ffa2303442e59ac9fc598ae40fcdbbdb539d263d53e3
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/57.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:947c9f0783ee7f4c4f0f2b4f79da44fd3e5a5ea6fa9fd040a451f4e8d9ea4725
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/58.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:53d51aee172eed5b2071f8c292351e35580baea6f8da5e40133f10540f028954
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/60.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:56c690971ed575f2b53caf382c95dabd25dc71cb75ba40b76d56a1e196338974
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/60.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2e55ce8e4cdfea75d44ace67d8e9fc10246357419baf1d6ce34c108208ac7a6b
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/61.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cd7647b932ba7cb0720645532a4d49d649829b27ff99140c5d92dde6d24280a1
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/68.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e03b47899898b883b58c1e8f871ba347247b4c948e4afe7285ed8b33d8904a62
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/68.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b64fb6395b07162d3ccb60e344eb2ddddd24f7f7f58aa573835936e26e9d9337
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/70.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3de9a118f488ad8f52c3b6a5dabcab74594ee5557727e0d8a6868aad99da2357
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/75.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c3d589490f5fe61d0e3fc57824c485ed25f17dea83150734a837e16d1ec3986b
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/77.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b9249cb7fe2fdd5cba92a6a10b3ef05c4f71730aa4f107ab8bf5d911e8e23a2a
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/82.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:65a5f28dc1497fcc0726070f2d0282c6e9c8b91fd2fd5166343f8c75b21d0fac
+ size 84934656