NeMo
okuchaiev committed
Commit e72b415
1 Parent(s): 950e64c

Add files using large-upload tool

Files changed (26)
  1. .gitattributes +15 -0
  2. model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_17_96.pt +3 -0
  3. model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_39_96.pt +3 -0
  4. model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_75_96.pt +3 -0
  5. model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_79_96.pt +3 -0
  6. model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_85_96.pt +3 -0
  7. model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_87_96.pt +3 -0
  8. model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_89_96.pt +3 -0
  9. model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_90_96.pt +3 -0
  10. model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_91_96.pt +3 -0
  11. model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_95_96.pt +3 -0
  12. model_weights/model.decoder.layers.self_attention.linear_proj.weight/1.0.0 +3 -0
  13. model_weights/model.decoder.layers.self_attention.linear_proj.weight/14.0.3 +3 -0
  14. model_weights/model.decoder.layers.self_attention.linear_proj.weight/16.0.1 +3 -0
  15. model_weights/model.decoder.layers.self_attention.linear_proj.weight/27.0.2 +3 -0
  16. model_weights/model.decoder.layers.self_attention.linear_proj.weight/34.0.6 +3 -0
  17. model_weights/model.decoder.layers.self_attention.linear_proj.weight/38.0.2 +3 -0
  18. model_weights/model.decoder.layers.self_attention.linear_proj.weight/5.0.0 +3 -0
  19. model_weights/model.decoder.layers.self_attention.linear_proj.weight/51.0.3 +3 -0
  20. model_weights/model.decoder.layers.self_attention.linear_proj.weight/57.0.1 +3 -0
  21. model_weights/model.decoder.layers.self_attention.linear_proj.weight/58.0.1 +3 -0
  22. model_weights/model.decoder.layers.self_attention.linear_proj.weight/74.0.5 +3 -0
  23. model_weights/model.decoder.layers.self_attention.linear_proj.weight/80.0.3 +3 -0
  24. model_weights/model.decoder.layers.self_attention.linear_proj.weight/84.0.0 +3 -0
  25. model_weights/model.decoder.layers.self_attention.linear_proj.weight/85.0.3 +3 -0
  26. model_weights/model.decoder.layers.self_attention.linear_proj.weight/89.0.4 +3 -0
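
The commit message notes that these shards were pushed with a large-upload tool. As a rough sketch of how a checkpoint folder of this shape can be pushed to the Hub, the snippet below uses huggingface_hub's upload_large_folder; the repo id and local path are placeholders, and treating this call as the "large-upload tool" from the commit message is an assumption, not something recorded in the commit itself.

    # Sketch only: pushing a sharded NeMo checkpoint folder to the Hub.
    # upload_large_folder exists in recent versions of huggingface_hub;
    # repo_id and folder_path are placeholders, not taken from this commit.
    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_large_folder(
        repo_id="your-org/your-nemo-checkpoint",  # placeholder
        folder_path="path/to/checkpoint_dir",     # local dir containing model_weights/
        repo_type="model",
    )

The tool is designed to resume interrupted uploads and to spread a large upload over several commits, which would be consistent with a commit like this one adding only a subset of the shard files.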
.gitattributes CHANGED
@@ -1510,3 +1510,18 @@ model_weights/model.decoder.layers.self_attention.linear_proj.weight/67.0.1 filt
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/70.0.2 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/59.0.3 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/46.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/1.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/80.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/27.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/38.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/58.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/74.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/14.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/5.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/89.0.4 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/51.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/16.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/34.0.6 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/57.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/85.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/84.0.0 filter=lfs diff=lfs merge=lfs -text
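
Each added line above routes one new weight shard through Git LFS, so the repository stores a small pointer file in place of the tensor payload. As a minimal sketch of what these entries amount to (in practice git lfs track or the upload tooling appends them automatically; the two paths below are copied from the diff above):

    # Sketch: append LFS-tracking rules for new shard files to .gitattributes.
    # Normally git-lfs / the upload tooling writes these lines itself; this only
    # makes explicit what the added lines above mean.
    new_shards = [
        "model_weights/model.decoder.layers.self_attention.linear_proj.weight/1.0.0",
        "model_weights/model.decoder.layers.self_attention.linear_proj.weight/80.0.3",
    ]
    with open(".gitattributes", "a") as f:
        for path in new_shards:
            f.write(f"{path} filter=lfs diff=lfs merge=lfs -text\n")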
model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_17_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:abcbc429045a564b70b5b8e6a3aebd65d5c9033d0851500eb76c099f47d78a95
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_39_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3b109bc001c35c681140f7729d952690a2e524e10f7f31b66473cd9368535724
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_75_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af07f6ddc756fcb79a4871a38bab569ed54f5c8e92d27ab935c1719ec6d9ad7b
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_79_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cbcdcca6c7c76a6af8182f224ae09b269d499a7e1e622db91d78e6a034ad4dd5
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_85_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fc4edfcb2c11792b55ddf7c83de62c3bb26d09db3990f5e703c0a864bf1d9635
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_87_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b05019f3b231fd0abf8b970f1dc9fe9c9258e169604f125c8aa4defb579a697b
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_89_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:973b9eac5c2f2012525ea4b78ed498c34a1294b3c1c7e6e7f2ea0453c6e9ea07
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_90_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1784707fcd135c45e6816dddceadc9820f25df2fb07797c61727ad5897929465
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_91_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:01048ad3a21833fd1c3279d20e19d6190d5d695c68f738e3b6401bb8b6601ed3
+ size 1840
model_weights/model.decoder.layers.mlp.linear_fc2._extra_state/shard_95_96.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ed9b0e88ea003e4694006347597bbf59fbaec88fec35807929aadf816051d8e
+ size 1840
model_weights/model.decoder.layers.self_attention.linear_proj.weight/1.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:771d1cfc5ea6319040346e80508402c8830d5d9197f55d988b90af7c93031aa4
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/14.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c6fc75a4e9a46c32c47e8ce60e37e9ccfedeaab00a236d89e6e1e23169253cf
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/16.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4d51f5a4007e4ec5921178b4d74acb4b607f2ca71c788df4bf16dac2e96725b6
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/27.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d95f81d9f6b63f61d2039d6ec1d38b8bd2d94422af499ab2b1ec3740764112fd
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/34.0.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7066466d0ecc676c8c5f6ae6dfe43f97db477c2c4ef4a883f3092fbf869162cc
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/38.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:653c977e66fdeec75da26a780834e1c42df8a7bd117a546b6335c94216d15a3b
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/5.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cf85757b94c60219e215ef89020f045903d69e5897e78bc7c24b823387a8d1dc
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/51.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:60f95e6586a4a735bd01c7adc37866794a410255afa81682ffc318dc7dc7d480
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/57.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:128e5dd6a36ae62882d8015890229b89f8a2ef069bdc776be5e54ed727ab2ac4
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/58.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5d67e2088f207f92022f4472155ed5c74319c9c27848afe8cc2e2496e91d8a6b
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/74.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e82b09b70449a9a49e24ee5c9824149f7ef613c4bd4342eb7caecd7ef18d002e
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/80.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:836cb08f4c53a751b2de57f25f23d473199106bf6bbd265071ffda7239bf6e7f
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/84.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ce01d32ba63c65c13685c2de6f1c13e70d645ba534749e14eb665a29b1bed200
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/85.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4701b50bf8a15175c59ca9f5886fa7057a9b72fdc1ccfd3948af7695418b6fc0
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/89.0.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1c234d5b03486b98e45d3edb0f2f33db482bc07ffe064ed8815ae830cbdab171
+ size 84934656
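
Every file added in this commit is a Git LFS pointer rather than the payload itself: three lines giving the LFS spec version, the SHA-256 of the real object, and its size in bytes (1840 bytes for each linear_fc2._extra_state shard, 84934656 bytes for each linear_proj.weight shard). A minimal sketch for checking a locally fetched shard against its pointer; the paths in the example call are placeholders:

    # Sketch: verify a downloaded shard against the oid/size recorded in its LFS pointer.
    import hashlib

    def parse_pointer(path):
        """Read a Git LFS pointer file ('key value' per line) into a dict."""
        fields = {}
        with open(path) as f:
            for line in f:
                key, _, value = line.strip().partition(" ")
                fields[key] = value
        return fields

    def matches_pointer(pointer_path, payload_path):
        fields = parse_pointer(pointer_path)
        expected_oid = fields["oid"].removeprefix("sha256:")
        expected_size = int(fields["size"])
        with open(payload_path, "rb") as f:
            data = f.read()
        return len(data) == expected_size and hashlib.sha256(data).hexdigest() == expected_oid

    # Placeholder paths, for illustration only:
    # matches_pointer("pointers/shard_17_96.pt", "downloads/shard_17_96.pt")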