NeMo
nvidia
jiaqiz committed on
Commit
dc48b99
1 Parent(s): c954e08

Add files using large-upload tool

Files changed (26)
  1. .gitattributes +25 -0
  2. model_weights/model.decoder.layers.self_attention.linear_proj.weight/0.0.0 +3 -0
  3. model_weights/model.decoder.layers.self_attention.linear_proj.weight/0.0.6 +3 -0
  4. model_weights/model.decoder.layers.self_attention.linear_proj.weight/0.0.7 +3 -0
  5. model_weights/model.decoder.layers.self_attention.linear_proj.weight/12.0.7 +3 -0
  6. model_weights/model.decoder.layers.self_attention.linear_proj.weight/15.0.7 +3 -0
  7. model_weights/model.decoder.layers.self_attention.linear_proj.weight/21.0.5 +3 -0
  8. model_weights/model.decoder.layers.self_attention.linear_proj.weight/23.0.7 +3 -0
  9. model_weights/model.decoder.layers.self_attention.linear_proj.weight/30.0.3 +3 -0
  10. model_weights/model.decoder.layers.self_attention.linear_proj.weight/30.0.6 +3 -0
  11. model_weights/model.decoder.layers.self_attention.linear_proj.weight/33.0.5 +3 -0
  12. model_weights/model.decoder.layers.self_attention.linear_proj.weight/41.0.6 +3 -0
  13. model_weights/model.decoder.layers.self_attention.linear_proj.weight/46.0.7 +3 -0
  14. model_weights/model.decoder.layers.self_attention.linear_proj.weight/49.0.5 +3 -0
  15. model_weights/model.decoder.layers.self_attention.linear_proj.weight/53.0.4 +3 -0
  16. model_weights/model.decoder.layers.self_attention.linear_proj.weight/56.0.5 +3 -0
  17. model_weights/model.decoder.layers.self_attention.linear_proj.weight/57.0.3 +3 -0
  18. model_weights/model.decoder.layers.self_attention.linear_proj.weight/57.0.5 +3 -0
  19. model_weights/model.decoder.layers.self_attention.linear_proj.weight/65.0.3 +3 -0
  20. model_weights/model.decoder.layers.self_attention.linear_proj.weight/67.0.2 +3 -0
  21. model_weights/model.decoder.layers.self_attention.linear_proj.weight/74.0.0 +3 -0
  22. model_weights/model.decoder.layers.self_attention.linear_proj.weight/78.0.1 +3 -0
  23. model_weights/model.decoder.layers.self_attention.linear_proj.weight/81.0.7 +3 -0
  24. model_weights/model.decoder.layers.self_attention.linear_proj.weight/87.0.0 +3 -0
  25. model_weights/model.decoder.layers.self_attention.linear_proj.weight/92.0.5 +3 -0
  26. model_weights/model.decoder.layers.self_attention.linear_proj.weight/95.0.6 +3 -0
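The commit message says these shards were added with a large-upload tool, but the page does not show the exact command. As a rough, hedged sketch of the usual workflow, the snippet below uploads a local folder of weight shards to a Hub repo with huggingface_hub; the repo id and local path are placeholders, not taken from this commit.

from huggingface_hub import HfApi

api = HfApi()
# Upload every file under ./model_weights to the model repo in one commit.
# This is a plain folder upload; the resumable "large upload" workflow the
# commit message refers to may differ, so treat this as illustrative only.
api.upload_folder(
    repo_id="org/model-repo",        # placeholder repo id
    repo_type="model",
    folder_path="./model_weights",   # placeholder local path to the shards
    commit_message="Add files using large-upload tool",
)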
.gitattributes CHANGED
@@ -2645,3 +2645,28 @@ model_weights/model.decoder.layers.self_attention.linear_proj.weight/21.0.1 filt
 model_weights/model.decoder.layers.self_attention.linear_proj.weight/45.0.3 filter=lfs diff=lfs merge=lfs -text
 model_weights/model.decoder.layers.self_attention.linear_proj.weight/19.0.6 filter=lfs diff=lfs merge=lfs -text
 model_weights/model.decoder.layers.self_attention.linear_proj.weight/91.0.4 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/0.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/23.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/74.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/41.0.6 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/33.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/0.0.6 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/65.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/67.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/92.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/78.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/56.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/30.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/95.0.6 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/49.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/15.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/87.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/81.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/53.0.4 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/30.0.6 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/12.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/57.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/57.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/0.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/21.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/46.0.7 filter=lfs diff=lfs merge=lfs -text
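Each line added to .gitattributes registers one shard with the Git LFS filter, so Git stores a small pointer while the binary content goes to LFS storage. These rules are normally written by `git lfs track <path>`; the helper below is a hypothetical Python sketch of the same bulk registration, not the tool used in this commit.

from pathlib import Path

LFS_RULE = "filter=lfs diff=lfs merge=lfs -text"

def track_with_lfs(paths, gitattributes=".gitattributes"):
    # Append an LFS tracking rule for each path, skipping duplicates.
    attrs = Path(gitattributes)
    existing = set(attrs.read_text().splitlines()) if attrs.exists() else set()
    with attrs.open("a") as f:
        for p in paths:
            rule = f"{p} {LFS_RULE}"
            if rule not in existing:
                f.write(rule + "\n")

track_with_lfs([
    "model_weights/model.decoder.layers.self_attention.linear_proj.weight/0.0.0",
])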
model_weights/model.decoder.layers.self_attention.linear_proj.weight/0.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a64c6b8b8ef08d45520965d678065489964ec1105017e05e6037e48425f876eb
+ size 84934656
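The new files themselves are Git LFS pointers, not raw tensors: each records the LFS spec version, the SHA-256 of the real shard, and its size in bytes (84934656 for every shard in this commit). Below is a hedged sketch of how the oid and size fields are derived from a local copy of a shard; the path is illustrative.

import hashlib

def lfs_pointer(path, chunk_size=1 << 20):
    # Stream the file so multi-GB shards do not need to fit in memory.
    digest = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
            size += len(chunk)
    return (
        "version https://git-lfs.github.com/spec/v1\n"
        f"oid sha256:{digest.hexdigest()}\n"
        f"size {size}\n"
    )

print(lfs_pointer("model_weights/model.decoder.layers.self_attention.linear_proj.weight/0.0.0"))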
model_weights/model.decoder.layers.self_attention.linear_proj.weight/0.0.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c2f5a38bc04f6b54d4a4f6e466da6cad9389ce7fb789f06cae3e9e17c6d468d
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/0.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:603173052374fe8bb6397d7033d1b3bc5bf3cbf124a5ece5a42081e3dd94281c
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/12.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b954d9134eb8f70da364dc989f8dab5802c602d3dab278aa1697e9163712617
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/15.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d734137d7c220a17439aa20410c6ecd62774bc78680559cd1dd8e824327c2266
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/21.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cf0382ed0bbca36a8c4f534d82ba10321bdaac1fdd8b7d562a39e65803445725
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/23.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5dc7c6689c405f129a2a2bece57084a361f2f79c239f65dca1ccb181b968271f
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/30.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:10b7b4bbbbb9ed20d4236fa8d0bc3a837332b9b5553a19faf7ff003aaf1e19d7
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/30.0.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:80d56add1c9a0c1064314ae97c750b2b5876c40f26d079deae2d70b0ee457bd5
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/33.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d28966a1e9d2ced8b76f2a8980c9bc8c4e9564b82f50f6b591b56587f7976771
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/41.0.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:580bd635d33d7a1103611118adbcb796336e2f22fa1819d75ddcb9e9f70d1d8f
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/46.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cc4e7c213969233411c20d9efe0c790107f09bed583a12ec7dd5b98d3b34d048
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/49.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:20b2715f92043eab1b01eb546db8e2d80810bb250d28c9f095383666d4e2ebb6
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/53.0.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a50344eadedbb2547fde820421e552b46d673a1e4eaadb3ccd0dda81efb4098b
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/56.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4496cdd5a2b47b8f56244707dfaf07ff2199d69a018a670802c44bbaa8ea82c7
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/57.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2517c0d79c536541150964083ac23d5de4060dc370d8f0ec3689b1156aa4f7aa
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/57.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:32677dd5265d4ab262e5443ab35b981dda41531fe46f4df931e44c41c6361215
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/65.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:07e00e6e982b910c6e3ee172c1e2b8b9e79cbd132e259dd108979b658467844c
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/67.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d8111a54a3b6bc8d758d49e54f80dc07b4768d5ed3dfa360e0927052e1d2aaf9
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/74.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5100489bca21e66159f554f56e3cd29d11deddb6254ccf468e3648112fa6bcad
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/78.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7c3d49bfc6f7f1be42943bcc7f6cdb0533206217020c9f6f61b1f804566d720f
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/81.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:16a13720dfe3cc35f15ddb6c6837409d04e5e4840f86a841964e11bb57fd0974
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/87.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7748305a786d628676cad5aeef32262fdc8c1f4d711f79546410ec8d3696a1fb
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/92.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:da0c6eed6b87b9ac0d88aeec2fc8e71792e62ed7592a9c3a63cd2c60c15fd771
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/95.0.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e57c3285de6c81846ea22bf598b3d44b44f0215731753efa44569f381b05e49f
+ size 84934656