nvidia / NeMo
jiaqiz committed
Commit c164158
1 parent: a59e95a

Add files using large-upload tool

Files changed (26)
  1. .gitattributes +25 -0
  2. model_weights/model.decoder.layers.self_attention.linear_proj.weight/10.0.6 +3 -0
  3. model_weights/model.decoder.layers.self_attention.linear_proj.weight/14.0.1 +3 -0
  4. model_weights/model.decoder.layers.self_attention.linear_proj.weight/26.0.4 +3 -0
  5. model_weights/model.decoder.layers.self_attention.linear_proj.weight/26.0.6 +3 -0
  6. model_weights/model.decoder.layers.self_attention.linear_proj.weight/35.0.3 +3 -0
  7. model_weights/model.decoder.layers.self_attention.linear_proj.weight/39.0.0 +3 -0
  8. model_weights/model.decoder.layers.self_attention.linear_proj.weight/39.0.4 +3 -0
  9. model_weights/model.decoder.layers.self_attention.linear_proj.weight/45.0.0 +3 -0
  10. model_weights/model.decoder.layers.self_attention.linear_proj.weight/47.0.3 +3 -0
  11. model_weights/model.decoder.layers.self_attention.linear_proj.weight/50.0.0 +3 -0
  12. model_weights/model.decoder.layers.self_attention.linear_proj.weight/53.0.2 +3 -0
  13. model_weights/model.decoder.layers.self_attention.linear_proj.weight/55.0.2 +3 -0
  14. model_weights/model.decoder.layers.self_attention.linear_proj.weight/60.0.4 +3 -0
  15. model_weights/model.decoder.layers.self_attention.linear_proj.weight/64.0.0 +3 -0
  16. model_weights/model.decoder.layers.self_attention.linear_proj.weight/66.0.0 +3 -0
  17. model_weights/model.decoder.layers.self_attention.linear_proj.weight/75.0.4 +3 -0
  18. model_weights/model.decoder.layers.self_attention.linear_proj.weight/76.0.3 +3 -0
  19. model_weights/model.decoder.layers.self_attention.linear_proj.weight/76.0.7 +3 -0
  20. model_weights/model.decoder.layers.self_attention.linear_proj.weight/78.0.3 +3 -0
  21. model_weights/model.decoder.layers.self_attention.linear_proj.weight/87.0.5 +3 -0
  22. model_weights/model.decoder.layers.self_attention.linear_proj.weight/89.0.1 +3 -0
  23. model_weights/model.decoder.layers.self_attention.linear_proj.weight/91.0.7 +3 -0
  24. model_weights/model.decoder.layers.self_attention.linear_proj.weight/94.0.0 +3 -0
  25. model_weights/model.decoder.layers.self_attention.linear_proj.weight/94.0.3 +3 -0
  26. model_weights/model.decoder.layers.self_attention.linear_proj.weight/94.0.5 +3 -0
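The commit message above refers to the Hugging Face large-upload tool. Purely as a hedged illustration (not taken from this repository), the sketch below shows how a sharded weights folder could be pushed that way with huggingface_hub; the repo id, the local folder path, and the availability of HfApi.upload_large_folder in the installed huggingface_hub version are all assumptions.

# Hypothetical sketch only: an "Add files using large-upload tool" style upload.
# Assumes a recent huggingface_hub release exposing HfApi.upload_large_folder and
# an authenticated environment (e.g. after `huggingface-cli login`).
from huggingface_hub import HfApi

api = HfApi()
api.upload_large_folder(
    repo_id="nvidia/NeMo",          # placeholder target repository
    folder_path="./model_weights",  # placeholder local folder holding the weight shards
    repo_type="model",
)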
.gitattributes CHANGED
@@ -2520,3 +2520,28 @@ model_weights/model.decoder.layers.self_attention.linear_proj.weight/25.0.6 filt
 model_weights/model.decoder.layers.self_attention.linear_proj.weight/22.0.4 filter=lfs diff=lfs merge=lfs -text
 model_weights/model.decoder.layers.self_attention.linear_proj.weight/2.0.1 filter=lfs diff=lfs merge=lfs -text
 model_weights/model.decoder.layers.self_attention.linear_proj.weight/61.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/94.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/50.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/55.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/87.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/10.0.6 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/35.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/94.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/94.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/66.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/14.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/53.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/75.0.4 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/26.0.6 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/45.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/78.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/91.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/39.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/89.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/60.0.4 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/76.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/26.0.4 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/47.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/39.0.4 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/76.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/64.0.0 filter=lfs diff=lfs merge=lfs -text
model_weights/model.decoder.layers.self_attention.linear_proj.weight/10.0.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:80d9186ee3ddeb1b028014c1aa0ad0dd2e28d5e4246a6b686fb12fc7c48cd6c9
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/14.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3180c971551fa8f86d6205df02821fdcaa959e33a83e1f63119119c7068f2fa2
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/26.0.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f4a1dec99b7b11825f1b30ff5834642fd0deea2dec84e9fd0187cb4604354be9
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/26.0.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fb2b1abbe7411deb3df2dadebc47d949c64a9099c383d9f77a1fa9fb60fe5323
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/35.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:02be21a889bdfe95b619b6bda1372de4584f78e6caa7da374e691a6164c7296c
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/39.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db9a5c40e44fa915c8558c88df1f865d7cd5fd1111219cd59a9080e97c917bb6
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/39.0.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ee8fb46e0816785555a6df3042d05591107553ad051b83ff7461fcce81ae3261
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/45.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4ce808a1cc7e696ee2dede6b60861e3008f698d74d6d57c7809c9073fe5445e4
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/47.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bb8a9ed7ffd801dbb44b800a3081f72a932a2640b6b92d0e0c2ad466d6042523
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/50.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:17447047ed133e72f2f830af8aef34e8917e9738d049217e73fb90a7cd01ea4e
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/53.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2cb2fcb2cdf52bb39e6e2bcb1ea71a0f5b27a7d0c90c6095a384344d7d9e4d56
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/55.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c00eeee40bdf3720bc66c93a3698569fedb27abf14ab32bca2e82ae11325bcd5
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/60.0.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1261e98e891337e1e804d2b3767ff9278453c9dbb2b4807779ac85d8ef74c5fd
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/64.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0728cdb6a0d8150feb6595506c8d31134259ce263c2815d90447e190456a0eb1
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/66.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:060c6b29e89ead3f1d99f9f8005ec08ff3af9eff7dc37169e4b4619fe3237731
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/75.0.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:08c3e02f795bbfe4f2d4ed86a2a1372e8e73c229f31f4f8dead0c7aa96e710ed
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/76.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5e1244731564f10935f958a9496468992220c91290e800ae016fb79f5ed7bd6d
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/76.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7471a114f1880b54dc7e6dc36391bf58760ec8ce0fb2d2d52ef7cd62e466f998
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/78.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:180261a4d5a6371c24987e216df7ff6e1fab1f671d88494598ca395c96e3abcf
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/87.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef037fd8d67a155d0740bafc72a847563996c53b93c2ac5c0001400b43e04ae9
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/89.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:57b0455eee1b6e5c1ae05b22010045b9ae45531edb87c59fb1bd95d29e96c52a
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/91.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:12ccac75fe3404d4e54e1117c39fcd6e51bcc6fd74e8a7b99245628b0cb7e65b
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/94.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:491ac25c32fcb496b708ffee46fde24f5a6d4a30a975949644cb0358339faaff
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/94.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cee67817745e5d2c2c7c2b9c6b1de7853b40730cfca6ebffb0702f93b8731c40
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/94.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2d14f7e1719dd753e2ce6d06f1719f521ea177d465c029c99447a746090891a9
+ size 84934656
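Every shard added in this commit is stored through Git LFS, so the diff shows only three-line pointer files (version, oid, size) rather than the 84934656-byte tensors themselves. Below is a minimal, hedged sketch of reading those three fields back from a pointer file in a local checkout; read_lfs_pointer is a hypothetical helper and the example path is just one of the pointers above, neither comes from NeMo itself.

# Hypothetical helper: parse a Git LFS pointer file like the ones added in this commit.
# Each pointer holds exactly three "key value" lines: version, oid, size.
def read_lfs_pointer(path):
    fields = {}
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            if key:
                fields[key] = value
    return {
        "version": fields.get("version"),
        "oid": fields.get("oid", "").removeprefix("sha256:"),  # bare hex digest
        "size": int(fields.get("size", 0)),                    # payload size in bytes
    }

# Placeholder usage: prints the digest and size recorded in one of the pointers above
# (every shard in this commit records size 84934656).
info = read_lfs_pointer(
    "model_weights/model.decoder.layers.self_attention.linear_proj.weight/10.0.6"
)
print(info["oid"], info["size"])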