vitoplantamura committed on
Commit fd022c4
1 Parent(s): c54c039

Initial commit

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. TinyLlama-1.1B-Chat-v0.3-fp16/2646.bin +3 -0
  2. TinyLlama-1.1B-Chat-v0.3-fp16/2648.bin +3 -0
  3. TinyLlama-1.1B-Chat-v0.3-fp16/2652.bin +3 -0
  4. TinyLlama-1.1B-Chat-v0.3-fp16/2656.bin +3 -0
  5. TinyLlama-1.1B-Chat-v0.3-fp16/3681.bin +3 -0
  6. TinyLlama-1.1B-Chat-v0.3-fp16/6390.bin +3 -0
  7. TinyLlama-1.1B-Chat-v0.3-fp16/6478.bin +3 -0
  8. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_ConstantOfShape_5F_2_5F_output_5F_0.bin +3 -0
  9. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_15_5F_output_5F_0.bin +3 -0
  10. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_16_5F_output_5F_0.bin +3 -0
  11. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_1_5F_output_5F_0.bin +3 -0
  12. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_2_5F_output_5F_0.bin +3 -0
  13. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_35_5F_output_5F_0.bin +3 -0
  14. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_36_5F_output_5F_0.bin +3 -0
  15. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_3_5F_output_5F_0.bin +3 -0
  16. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_6_5F_output_5F_0.bin +3 -0
  17. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_8_5F_output_5F_0.bin +3 -0
  18. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Mul_5F_output_5F_0.bin +3 -0
  19. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_input_5F_layernorm_2F_Constant_5F_1_5F_output_5F_0.bin +3 -0
  20. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_input_5F_layernorm_2F_Constant_5F_output_5F_0.bin +3 -0
  21. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_ConstantOfShape_5F_output_5F_0.bin +3 -0
  22. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_Constant_5F_17_5F_output_5F_0.bin +3 -0
  23. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_Constant_5F_2_5F_output_5F_0.bin +3 -0
  24. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_Constant_5F_3_5F_output_5F_0.bin +3 -0
  25. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_Constant_5F_42_5F_output_5F_0.bin +3 -0
  26. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_Constant_5F_4_5F_output_5F_0.bin +3 -0
  27. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_Constant_5F_61_5F_output_5F_0.bin +3 -0
  28. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_Mul_5F_4_5F_output_5F_0.bin +3 -0
  29. TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_rotary_5F_emb_2F_Constant_5F_output_5F_0.bin +3 -0
  30. TinyLlama-1.1B-Chat-v0.3-fp16/model.txt +0 -0
  31. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_embed_5F_tokens_2E_weight.bin +3 -0
  32. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_0_2E_input_5F_layernorm_2E_weight.bin +3 -0
  33. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_0_2E_post_5F_attention_5F_layernorm_2E_weight.bin +3 -0
  34. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_10_2E_input_5F_layernorm_2E_weight.bin +3 -0
  35. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_10_2E_post_5F_attention_5F_layernorm_2E_weight.bin +3 -0
  36. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_11_2E_input_5F_layernorm_2E_weight.bin +3 -0
  37. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_11_2E_post_5F_attention_5F_layernorm_2E_weight.bin +3 -0
  38. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_12_2E_input_5F_layernorm_2E_weight.bin +3 -0
  39. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_12_2E_post_5F_attention_5F_layernorm_2E_weight.bin +3 -0
  40. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_13_2E_input_5F_layernorm_2E_weight.bin +3 -0
  41. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_13_2E_post_5F_attention_5F_layernorm_2E_weight.bin +3 -0
  42. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_14_2E_input_5F_layernorm_2E_weight.bin +3 -0
  43. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_14_2E_post_5F_attention_5F_layernorm_2E_weight.bin +3 -0
  44. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_15_2E_input_5F_layernorm_2E_weight.bin +3 -0
  45. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_15_2E_post_5F_attention_5F_layernorm_2E_weight.bin +3 -0
  46. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_16_2E_input_5F_layernorm_2E_weight.bin +3 -0
  47. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_16_2E_post_5F_attention_5F_layernorm_2E_weight.bin +3 -0
  48. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_17_2E_input_5F_layernorm_2E_weight.bin +3 -0
  49. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_17_2E_post_5F_attention_5F_layernorm_2E_weight.bin +3 -0
  50. TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_18_2E_input_5F_layernorm_2E_weight.bin +3 -0
TinyLlama-1.1B-Chat-v0.3-fp16/2646.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4208ddad581b70362ef04696faa16b7e40fc6b9e6f39432a9a50bf6085ff8d67
+ size 32
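
Each of the .bin entries in this commit is a Git LFS pointer file (version / oid / size lines) rather than the tensor data itself. As a minimal sketch, assuming Python and the standard git-lfs v1 pointer layout (the file paths in the example are hypothetical placeholders, not files from this commit), such a pointer can be parsed and a downloaded blob checked against its recorded oid and size like this:

import hashlib
from pathlib import Path

def parse_lfs_pointer(pointer_path):
    # Each pointer line is "key value", e.g. "oid sha256:<hex>" or "size 32".
    fields = {}
    for line in Path(pointer_path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

def verify_blob(pointer_path, blob_path):
    # Compare the downloaded blob's length and sha256 digest with the pointer's fields.
    fields = parse_lfs_pointer(pointer_path)
    data = Path(blob_path).read_bytes()
    return (len(data) == int(fields["size"])
            and "sha256:" + hashlib.sha256(data).hexdigest() == fields["oid"])

# Example usage (hypothetical paths):
# verify_blob("pointers/2646.bin", "downloads/2646.bin")
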
TinyLlama-1.1B-Chat-v0.3-fp16/2648.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ccef977509a25a0c338e1cf666df4be92633cb2eadc7df97a069695edd5d1a86
+ size 32
TinyLlama-1.1B-Chat-v0.3-fp16/2652.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9e10808fba106723b356038846563c0eb82b60960e99b183681042950c720057
+ size 32
TinyLlama-1.1B-Chat-v0.3-fp16/2656.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:75c48b63dbfb2dec0201fbf444a1c8101451f4811034828fb5b8e77804f1efa8
+ size 24
TinyLlama-1.1B-Chat-v0.3-fp16/3681.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c9cb04ba987a95a535a8ba7d18b3815d0f04b47add63f2b18ed0a66f9a6d617a
+ size 16
TinyLlama-1.1B-Chat-v0.3-fp16/6390.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9d34149fbd1fe777eb238799054c8cbfbce372255f219f8740838def9bfd02db
+ size 16
TinyLlama-1.1B-Chat-v0.3-fp16/6478.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c730b69905c5ef7a4ca5269f72365400bde2dd2c04eaf9bbb3d1c4a265a0131
+ size 16
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_ConstantOfShape_5F_2_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5e4ffd7d196e150659e625599778aa9c37538bb184fce71b42b1d5b339f2aeec
+ size 32
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_15_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7c9fa136d4413fa6173637e883b6998d32e1d675f88cddff9dcbcf331820f4b8
+ size 8
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_16_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:96a296d224f285c67bee93c30f8a309157f0daa35dc5b87e410b78630a09cfc7
+ size 2
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_1_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7c9fa136d4413fa6173637e883b6998d32e1d675f88cddff9dcbcf331820f4b8
+ size 8
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_2_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d86e8112f3c4c4442126f8e9f44f16867da487f29052bf91b810457db34209a4
+ size 8
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_35_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:505114fe537172ea35e17ca1a7516edac516a89b31f983f7c6387d5d2bb462aa
+ size 2
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_36_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:67587acca92f43dc61b4c60e1fe7fb9e4b5820bf2d236de1af052de57219f27f
+ size 2
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_3_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:12a3ae445661ce5dee78d0650d33362dec29c4f82af05e7e57fb595bbbacf0ca
+ size 8
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_6_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6a69a6cc7473a16302890cd2a9e93e347281f6ea0e1bb784e589753bed0b3324
+ size 8
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Constant_5F_8_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af5570f5a1810b7af78caf4bc70a660f0df51e42baf91d4de5b2328de0e83dfc
+ size 8
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_Mul_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af9613760f72635fbdb44a5a0a63c39f12af30f950a6ee5c971be188e89c4051
+ size 32
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_input_5F_layernorm_2F_Constant_5F_1_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8094903f94ffaa75bc82243f7de1f907cc2005bac5f8ee0a55e9f20fc9ffd7e7
+ size 2
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_input_5F_layernorm_2F_Constant_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b8811852747cfa3620c3dd2af5d59498c240f208e689b4052bac934c29faf094
+ size 2
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_ConstantOfShape_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:00026c99bfe39e6f7afdfa71a85dfec6909cc9210d43d230b66bc82713bc2857
+ size 40
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_Constant_5F_17_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:35be322d094f9d154a8aba4733b8497f180353bd7ae7b0a15f90b586b549f28b
+ size 8
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_Constant_5F_2_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9d4ac218fb54041e3a70a8e14db1ea1af9f570f4842b4702ef9323f1ad8f0ec4
+ size 8
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_Constant_5F_3_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a06f129fc52abf6085679d7cd71dc41ec7580c7f5f73efef6d02dde22bb00994
+ size 8
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_Constant_5F_42_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6cc16abd70eefb90dc0ba0d14fb088630873b2c6ad943f7442356735984c35a3
+ size 8
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_Constant_5F_4_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f0a0278e4372459cca6159cd5e71cfee638302a7b9ca9b05c34181ac0a65ac5d
+ size 8
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_Constant_5F_61_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4d2ae2d6e0000b72a5739078e0e83bc029b6232edabff84d012acd1d4803db51
+ size 2
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_Mul_5F_4_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6ecd0f0bd7cf53c56d2129820911a26f815949eee418ca46b4f3d7a80cd969a7
+ size 40
TinyLlama-1.1B-Chat-v0.3-fp16/_2F_model_2F_model_2F_layers_2E_0_2F_self_5F_attn_2F_rotary_5F_emb_2F_Constant_5F_output_5F_0.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d86e8112f3c4c4442126f8e9f44f16867da487f29052bf91b810457db34209a4
+ size 8
TinyLlama-1.1B-Chat-v0.3-fp16/model.txt ADDED
The diff for this file is too large to render. See raw diff
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_embed_5F_tokens_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:286e6303e7ad39d694acb3e84776690fbbd1191ad64f2c7bf6aeba9faa4a71d6
+ size 131084288
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_0_2E_input_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:85687f2a78f67325b8cfde202d2aa65b7c057858dc8292f28b9442fd55cb1eb8
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_0_2E_post_5F_attention_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:49ea228e737731edae303508b21723f820ce8b8aa9bf44eadcc27245947d57a3
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_10_2E_input_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:071a1b7706e2f4197b061c8ecea9797d6674ce0e7c01c227d6e2d20aa8949533
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_10_2E_post_5F_attention_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9d484d19d004a3f081329134ae333f6f48ba66667058644d085fd17943378ed7
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_11_2E_input_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f262ce226d9df5f1f6ebc0246d17defcdf20c88f491e9737c07967165ebef741
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_11_2E_post_5F_attention_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ff985d5b9c84aa0baf19ad951ce9da95aed2c7ba958c4ab503b77aff89954947
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_12_2E_input_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:867af1746e67d54f567dde4f05da37b9526c69ee21a81ff2f3adc6b14ac096f3
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_12_2E_post_5F_attention_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5e3a7a338aa819b2bac639d2e37cadf2f9dad49e8c8e44707f4c3644cc4f10d9
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_13_2E_input_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5eabc8efe323212d99212c6f89bff77fabbb4062de234f268218b7990dda6641
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_13_2E_post_5F_attention_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7c713981f34d132f00af47798bc292e2ccebb8b6af84791ddb401ee6ad95fffc
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_14_2E_input_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e02a6a02e27b095b5ffc70069ba5511fbe5a7b58341ec066cb3d05b01f94a7d
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_14_2E_post_5F_attention_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2a6f83bda948b5b4d181b95858e12a243c9ac6ccc1a297e653748fed4d38509a
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_15_2E_input_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c8f6575e9e0686f5d61b80c3698ba91ad6964f1fd18e557ce2ac6d0cd9ebcbff
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_15_2E_post_5F_attention_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:78e5dbc2897ec4735979100f21fe9523127549f8a38bba223c5ea90999f901ee
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_16_2E_input_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:60d1a69c0c65e671aef82038f7953f68c4030cc11b529510bd6c66a1551fdaa9
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_16_2E_post_5F_attention_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d35bde7f8f5cf0b616e545730dcb8b9ecffa0baf668d14e5c8e1d94521570ac1
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_17_2E_input_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8578916695daef28b8d636f2bcde67165917e833e4551846a6ad601e740e4d3b
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_17_2E_post_5F_attention_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:56b9e95476c4a67550e58069d69348536a764a091497973d7322c03b3e3af7e5
+ size 4096
TinyLlama-1.1B-Chat-v0.3-fp16/model_2E_model_2E_layers_2E_18_2E_input_5F_layernorm_2E_weight.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:62446ebb822c86a64203f1b34e56b97eb3035d1c332ed6c25de1e488cf00281d
+ size 4096