NeMo
okuchaiev committed
Commit 666ceb9
1 Parent(s): 82c048c

Add files using large-upload tool

Files changed (26)
  1. .gitattributes +25 -0
  2. model_weights/model.decoder.layers.self_attention.linear_proj.weight/16.0.3 +3 -0
  3. model_weights/model.decoder.layers.self_attention.linear_proj.weight/18.0.3 +3 -0
  4. model_weights/model.decoder.layers.self_attention.linear_proj.weight/20.0.3 +3 -0
  5. model_weights/model.decoder.layers.self_attention.linear_proj.weight/21.0.0 +3 -0
  6. model_weights/model.decoder.layers.self_attention.linear_proj.weight/27.0.1 +3 -0
  7. model_weights/model.decoder.layers.self_attention.linear_proj.weight/31.0.6 +3 -0
  8. model_weights/model.decoder.layers.self_attention.linear_proj.weight/32.0.1 +3 -0
  9. model_weights/model.decoder.layers.self_attention.linear_proj.weight/32.0.2 +3 -0
  10. model_weights/model.decoder.layers.self_attention.linear_proj.weight/32.0.5 +3 -0
  11. model_weights/model.decoder.layers.self_attention.linear_proj.weight/38.0.4 +3 -0
  12. model_weights/model.decoder.layers.self_attention.linear_proj.weight/47.0.5 +3 -0
  13. model_weights/model.decoder.layers.self_attention.linear_proj.weight/52.0.5 +3 -0
  14. model_weights/model.decoder.layers.self_attention.linear_proj.weight/59.0.4 +3 -0
  15. model_weights/model.decoder.layers.self_attention.linear_proj.weight/6.0.6 +3 -0
  16. model_weights/model.decoder.layers.self_attention.linear_proj.weight/63.0.7 +3 -0
  17. model_weights/model.decoder.layers.self_attention.linear_proj.weight/66.0.0 +3 -0
  18. model_weights/model.decoder.layers.self_attention.linear_proj.weight/66.0.4 +3 -0
  19. model_weights/model.decoder.layers.self_attention.linear_proj.weight/72.0.3 +3 -0
  20. model_weights/model.decoder.layers.self_attention.linear_proj.weight/73.0.7 +3 -0
  21. model_weights/model.decoder.layers.self_attention.linear_proj.weight/74.0.2 +3 -0
  22. model_weights/model.decoder.layers.self_attention.linear_proj.weight/74.0.6 +3 -0
  23. model_weights/model.decoder.layers.self_attention.linear_proj.weight/75.0.5 +3 -0
  24. model_weights/model.decoder.layers.self_attention.linear_proj.weight/88.0.0 +3 -0
  25. model_weights/model.decoder.layers.self_attention.linear_proj.weight/9.0.5 +3 -0
  26. model_weights/model.decoder.layers.self_attention.linear_proj.weight/90.0.1 +3 -0
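The commit message says the shards were pushed with a large-upload tool, but the exact command is not recorded in the commit. As a rough, hypothetical sketch, an upload like this could be driven from Python with huggingface_hub; the repo id and local path below are placeholders, and the choice of `upload_large_folder` as the helper is an assumption, not something stated in this commit.

```python
# Hypothetical sketch only: push a local checkpoint directory to the Hub.
# repo_id and folder_path are placeholders, not values from this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_large_folder(
    repo_id="your-org/NeMo",        # placeholder repository id
    repo_type="model",
    folder_path="./model_weights",  # local directory holding the weight shards
)
```

The Hub's upload path stores large binary files through Git LFS, which is what produces the .gitattributes entries and pointer files shown in the diffs below.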
.gitattributes CHANGED
@@ -1110,3 +1110,28 @@ model_weights/model.decoder.layers.self_attention.linear_proj.weight/62.0.2 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/47.0.1 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/41.0.5 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/19.0.6 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/88.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/31.0.6 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/32.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/18.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/27.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/72.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/16.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/74.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/66.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/90.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/73.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/75.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/66.0.4 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/47.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/21.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/38.0.4 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/9.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/6.0.6 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/20.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/63.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/32.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/74.0.6 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/52.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/59.0.4 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/32.0.1 filter=lfs diff=lfs merge=lfs -text
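Each added line tells Git to route one weight shard through the LFS clean/smudge filter instead of storing the blob directly, and `-text` disables text/EOL handling for it. Entries of this form are typically written by `git lfs track` or by the Hub upload tooling. A small, hypothetical Python sketch (not part of this commit) that lists which paths a .gitattributes file marks for LFS:

```python
# Hypothetical sketch: list paths marked for Git LFS in a .gitattributes file.
# Matches entries like "<path> filter=lfs diff=lfs merge=lfs -text".
from pathlib import Path

def lfs_tracked_paths(gitattributes: str = ".gitattributes") -> list[str]:
    tracked = []
    for line in Path(gitattributes).read_text().splitlines():
        parts = line.split()
        # First token is the path pattern; the rest are attributes.
        if len(parts) > 1 and "filter=lfs" in parts[1:]:
            tracked.append(parts[0])
    return tracked

if __name__ == "__main__":
    for path in lfs_tracked_paths():
        print(path)
```

Real .gitattributes patterns can quote or escape spaces, so the naive whitespace split above is only a simplification.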
model_weights/model.decoder.layers.self_attention.linear_proj.weight/16.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bdc6a49f54325ee7b2a2e6872b3361a5a746aad16f414aea8234f89b6634455c
+ size 84934656
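Every file added in this commit is a Git LFS pointer with the same three-line layout: a spec version, a sha256 object id, and the blob size in bytes (84934656 bytes, exactly 81 MiB per shard here). A hypothetical sketch, not from the repo, that parses such a pointer and checks a locally resolved blob against it:

```python
# Hypothetical sketch: parse a Git LFS pointer file of the form
#   version https://git-lfs.github.com/spec/v1
#   oid sha256:<64 hex chars>
#   size <bytes>
# and verify a locally resolved blob against it.
import hashlib
from pathlib import Path

def read_lfs_pointer(pointer_path: str) -> dict:
    fields = {}
    for line in Path(pointer_path).read_text().splitlines():
        key, _, value = line.partition(" ")
        if key:
            fields[key] = value
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size": int(fields["size"]),
    }

def matches_blob(pointer: dict, blob_path: str) -> bool:
    data = Path(blob_path).read_bytes()
    return (len(data) == pointer["size"]
            and hashlib.sha256(data).hexdigest() == pointer["sha256"])
```

The remaining file diffs below follow the identical pattern, differing only in the oid.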
model_weights/model.decoder.layers.self_attention.linear_proj.weight/18.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4588f4f7afad2460e4c598081807dd9e4d6c1073b2c6be1a878a5e216f66a0ee
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/20.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c3b1dfeec4afce58e463fea7ca3c8c53ce547e0f9e9a9fdb4616c2f5ac81dbac
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/21.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e6790d051c211b8fc04e7a08611b75924c6c71a181a784e7a4a5822fc133f568
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/27.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:628ec154a7085cc5a02eb699bf82ec48b2cacd4d9ba6d20908c9672297e99369
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/31.0.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:29d12ecdb3446513ed460f8167c1fde738ebc6b92381d6178ad24510d590a671
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/32.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e9a23c66a9f4a04c64001fd7c700d44f6ac2f0f239601c1a0521a850a545e801
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/32.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f7dad174f6d53179e89302fc300f3d07ae4891de61ab67d11da48cf2d9fc14dd
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/32.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3c68fedd650709a2e1618e029499913da1341a588187c41fb97949bcfbc1909c
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/38.0.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fbccb3408c3a4569d8c9008b3a0069753a540845d206377941362771b9074985
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/47.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f8ab7981a89216b7625e03ae49f78ecef0bc1aa4c8bf375e5ed8c9aafc2fac6a
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/52.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5bb427cd8a2f666f00b9b880015c932e111bd6a182b738daab2b5ac2295a6f8c
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/59.0.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ee5389529bf641b5de7e2754d34aecb8b262e6ae65243916cede113ffc1f82ca
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/6.0.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b8c809f5d405f03706c9f7dc1649e2c735c0c1fce6d8891b8436b329625ac07b
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/63.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:917ffab243cbbbc02efe29affc395cc75971a3ae4935d23f1a74d8933251bb07
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/66.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a8164d9857beadcdd14bd26d5b3dddd0ffd7df01bd69f69d514ba6104e6da006
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/66.0.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:edd4a643ba531b7754957f941ff87cb6baa279d5775d2846e6483bb185c19d94
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/72.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5d048350643ac7b3147f27533c98cdbd36d69a792bf83529a8120a6795b1da2
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/73.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c85fc4a87fb06ceaa54aecd2312581e13f858d3d5a76c65e099470d7c4f4e581
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/74.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d0dfe3d086d5010f9e5124c93e906790988e386ce3305f889912f0cc07d2c457
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/74.0.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f71270d8c7f6406937cfde769f483ba13bb5e6d01452e6c23f4b80a50435d74f
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/75.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2ece7db088ba49bf3e5370d9a4d8b0993a0cf4e02995f6ba9ffcb20642ff4548
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/88.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f3e4c16c313f3a2970620704e3625e61905001ddf3c4b99f6b59fb96f1de3b92
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/9.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8801a3f6f825cf28b48e7e61f6e3fa10b394de6339574bf556cd53b695ffcffa
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/90.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d5801213e34d8275aa9881f1512742af96ab9c16f0ce93a14d7575dd1c106d75
+ size 84934656