NeMo
nvidia
jiaqiz committed
Commit c954e08
1 Parent(s): a107dc8

Add files using large-upload tool
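The message refers to the Hugging Face large-upload tooling for pushing many LFS-backed files in one pass. A minimal, hedged sketch of how a comparable bulk upload could be issued with `huggingface_hub`'s `upload_large_folder` (assuming `huggingface_hub` >= 0.25 and an authenticated write token; the repo id and local path below are placeholders, not values taken from this commit):

```python
# Sketch only: roughly reproduces a bulk checkpoint upload like this commit.
# Assumes huggingface_hub >= 0.25 is installed and the account has write access;
# repo_id and folder_path are placeholders, not details from the commit itself.
from huggingface_hub import HfApi

api = HfApi()
api.upload_large_folder(
    repo_id="your-org/your-model",   # placeholder target repository
    repo_type="model",
    folder_path="./checkpoint",      # local directory holding model_weights/ shards
)
```

`upload_large_folder` resumes interrupted transfers and parallelizes LFS uploads, which is why it is the usual choice for checkpoints of this size.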

Files changed (26)
  1. .gitattributes +25 -0
  2. model_weights/model.decoder.layers.self_attention.linear_proj.weight/14.0.7 +3 -0
  3. model_weights/model.decoder.layers.self_attention.linear_proj.weight/19.0.5 +3 -0
  4. model_weights/model.decoder.layers.self_attention.linear_proj.weight/19.0.6 +3 -0
  5. model_weights/model.decoder.layers.self_attention.linear_proj.weight/2.0.7 +3 -0
  6. model_weights/model.decoder.layers.self_attention.linear_proj.weight/21.0.1 +3 -0
  7. model_weights/model.decoder.layers.self_attention.linear_proj.weight/45.0.3 +3 -0
  8. model_weights/model.decoder.layers.self_attention.linear_proj.weight/46.0.3 +3 -0
  9. model_weights/model.decoder.layers.self_attention.linear_proj.weight/48.0.4 +3 -0
  10. model_weights/model.decoder.layers.self_attention.linear_proj.weight/48.0.6 +3 -0
  11. model_weights/model.decoder.layers.self_attention.linear_proj.weight/49.0.1 +3 -0
  12. model_weights/model.decoder.layers.self_attention.linear_proj.weight/53.0.6 +3 -0
  13. model_weights/model.decoder.layers.self_attention.linear_proj.weight/61.0.3 +3 -0
  14. model_weights/model.decoder.layers.self_attention.linear_proj.weight/62.0.1 +3 -0
  15. model_weights/model.decoder.layers.self_attention.linear_proj.weight/69.0.3 +3 -0
  16. model_weights/model.decoder.layers.self_attention.linear_proj.weight/70.0.3 +3 -0
  17. model_weights/model.decoder.layers.self_attention.linear_proj.weight/81.0.5 +3 -0
  18. model_weights/model.decoder.layers.self_attention.linear_proj.weight/83.0.0 +3 -0
  19. model_weights/model.decoder.layers.self_attention.linear_proj.weight/89.0.7 +3 -0
  20. model_weights/model.decoder.layers.self_attention.linear_proj.weight/9.0.2 +3 -0
  21. model_weights/model.decoder.layers.self_attention.linear_proj.weight/91.0.4 +3 -0
  22. model_weights/model.decoder.layers.self_attention.linear_proj.weight/91.0.5 +3 -0
  23. model_weights/model.decoder.layers.self_attention.linear_proj.weight/92.0.1 +3 -0
  24. model_weights/model.decoder.layers.self_attention.linear_proj.weight/93.0.4 +3 -0
  25. model_weights/model.output_layer.weight/3.0 +3 -0
  26. model_weights/model.output_layer.weight/7.0 +3 -0
.gitattributes CHANGED
@@ -2620,3 +2620,28 @@ model_weights/model.decoder.layers.self_attention.linear_proj.weight/42.0.6 filt
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/79.0.2 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/28.0.3 filter=lfs diff=lfs merge=lfs -text
  model_weights/model.decoder.layers.self_attention.linear_proj.weight/35.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/61.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.output_layer.weight/3.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/48.0.4 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/70.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/14.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/92.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/91.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/83.0.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.output_layer.weight/7.0 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/2.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/53.0.6 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/62.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/93.0.4 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/49.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/9.0.2 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/89.0.7 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/19.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/48.0.6 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/69.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/46.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/81.0.5 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/21.0.1 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/45.0.3 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/19.0.6 filter=lfs diff=lfs merge=lfs -text
+ model_weights/model.decoder.layers.self_attention.linear_proj.weight/91.0.4 filter=lfs diff=lfs merge=lfs -text
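Each added line binds one exact shard path to `filter=lfs diff=lfs merge=lfs -text`, so Git keeps only a small pointer in history while the tensor data lives in LFS storage. As a rough illustration (simplified: these entries are literal paths, whereas `.gitattributes` in general may use glob patterns), the file can be parsed to confirm that a given shard is routed through LFS:

```python
# Rough illustration: check whether a path from this commit is LFS-tracked,
# given .gitattributes entries of the form
#   <path> filter=lfs diff=lfs merge=lfs -text
def lfs_tracked_paths(gitattributes_text: str) -> set[str]:
    tracked = set()
    for line in gitattributes_text.splitlines():
        parts = line.split()
        # first token is the path pattern, remaining tokens are attributes
        if len(parts) >= 2 and "filter=lfs" in parts[1:]:
            tracked.add(parts[0])
    return tracked

attrs = open(".gitattributes").read()
shard = "model_weights/model.decoder.layers.self_attention.linear_proj.weight/61.0.3"
print(shard in lfs_tracked_paths(attrs))  # expected: True after this commit
```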
model_weights/model.decoder.layers.self_attention.linear_proj.weight/14.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:be98a5ae59ad92737cdcac319ee434750f98116d13ef08a5e56e0ede3b2d5d48
+ size 84934656
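Every weight file in this commit is stored as a three-line Git LFS pointer like the one above: the pointer spec version, the SHA-256 of the actual blob, and its size in bytes (84934656 bytes per linear_proj shard, 1179648000 bytes per output_layer shard). A hedged sketch of parsing such a pointer and verifying a separately downloaded blob against it (paths are placeholders; this is not part of the upload tooling itself):

```python
# Sketch: parse a Git LFS pointer (version / oid sha256:<hex> / size <bytes>)
# and verify that a downloaded blob matches it. Paths are placeholders.
import hashlib
import os

def parse_lfs_pointer(pointer_path: str) -> dict[str, str]:
    # Each pointer line is "<key> <value>", e.g. "size 84934656".
    fields = {}
    with open(pointer_path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

def blob_matches_pointer(blob_path: str, pointer_path: str) -> bool:
    fields = parse_lfs_pointer(pointer_path)
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])
    if os.path.getsize(blob_path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid
```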
model_weights/model.decoder.layers.self_attention.linear_proj.weight/19.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:540b7d02f263c98162ae98feadb178ac44e806bbab3925d1c45cdc98f7c4470f
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/19.0.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3a59daef72d5ae8fbe918825dc36502600c5a93e99df655c9b1b97693875b7e3
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/2.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e952b2267d07f745765362b69dce7acfd979748c8e4965fee5b316ef73a83206
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/21.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aae833259c9f9af17eed1f0977501a71d6e947fa3b7b76d935367c97de2c832a
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/45.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fe24df7bf2f8e0926f0bf17ba10f5668e26c0082ec88a62a51272c6dbd9e9998
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/46.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2f7e3cd25e574e7d9b35883f25de994bf272807ef49d1009edc7d0e54526f24c
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/48.0.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:55debe83971cda17c0e6b201915aa353dcafcb4cc4a099e1004e86ef73878134
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/48.0.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b92634e17616e19d6732560cce2508451019dd48324676a77207cfeda46344c0
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/49.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9f8db470853ddb4ded18371ac2873efbea848f8ee954cb94f2c1f669ccb77db3
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/53.0.6 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:996bacad62a482cd78e5cab4fc121727f8203ecc27d88a5eb633d93ae5e74139
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/61.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6b6b6e6f7bcdabcad57c5591ad25df95e49d7bf73bb4ddeb8765835d561eb219
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/62.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:64f11a2423f1b4acdf2ac560d079217d6c79235bd3e08c92018ce1d1ffee200c
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/69.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bdaeeaaf9695ca5eb65c2e43ef98063f9eded300c5550a4e26851efd802ea1d3
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/70.0.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:72b47889f672c2072f92ec0d29b3352d5caeb9e5edd5b4c26cf5cf3791ffef37
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/81.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7d14fbc3d92e5ca571dce95588e32ffe7c24a72ac558c658161294e2739336fe
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/83.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:907180ebc3ec0b0ddd93abd320e4c7932c08d4def3b30e8bd76f7c8a5b5e915d
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/89.0.7 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dab62566eb6b398354ee3cf3974b4eda34781757ba6d839ebc336037ec04d3b4
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/9.0.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:229894d0cee9c2986a33d5ece00006a9a0435f869bf58372383a328c16ff67a5
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/91.0.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:74d373cdb2e46083c73f7d201542796e9870cf28cda3900e8d5a68216b91dd68
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/91.0.5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:62fed4941bb78457f7a0fa0d6b2f8f212f549945fe8106d914d5c6456ef9843b
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/92.0.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0b30b4bc774e6866434c48c1814ea39f2e90b3525850fd200858ffb5db90f54f
+ size 84934656
model_weights/model.decoder.layers.self_attention.linear_proj.weight/93.0.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d7abb4921424acdd15545920d9230272a9f9d6d7add5f798fe6c5ffe664c334f
+ size 84934656
model_weights/model.output_layer.weight/3.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:65386428ef67699ec52486d75dd60fde9e1bd90eb2b0fb2f11d3ba353337dfc0
+ size 1179648000
model_weights/model.output_layer.weight/7.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:975cac484aa7556dbf6da413d67cc0fbc963e5192ac7bbcf88ecf9e8cfa273d9
+ size 1179648000