|
|
|
# convert Meta Llama 2 *.pth file to llama2.c *.bin file
|
downloads/llama2.c $ python export_meta_llama_bin.py Downloads/llama2_7b_chat bins/llama2_7b_chat.bin |
|
{'dim': 4096, 'multiple_of': 256, 'n_heads': 32, 'n_layers': 32, 'norm_eps': 1e-06, 'vocab_size': -1} |
|
writing tok_embeddings... |
|
writing tok_embeddings.weight... |
|
writing layers.0.attention_norm.weight... |
|
writing layers.1.attention_norm.weight... |
|
writing layers.2.attention_norm.weight... |
|
writing layers.3.attention_norm.weight... |
|
writing layers.4.attention_norm.weight... |
|
writing layers.5.attention_norm.weight... |
|
writing layers.6.attention_norm.weight... |
|
writing layers.7.attention_norm.weight... |
|
writing layers.8.attention_norm.weight... |
|
writing layers.9.attention_norm.weight... |
|
writing layers.10.attention_norm.weight... |
|
writing layers.11.attention_norm.weight... |
|
writing layers.12.attention_norm.weight... |
|
writing layers.13.attention_norm.weight... |
|
writing layers.14.attention_norm.weight... |
|
writing layers.15.attention_norm.weight... |
|
writing layers.16.attention_norm.weight... |
|
writing layers.17.attention_norm.weight... |
|
writing layers.18.attention_norm.weight... |
|
writing layers.19.attention_norm.weight... |
|
writing layers.20.attention_norm.weight... |
|
writing layers.21.attention_norm.weight... |
|
writing layers.22.attention_norm.weight... |
|
writing layers.23.attention_norm.weight... |
|
writing layers.24.attention_norm.weight... |
|
writing layers.25.attention_norm.weight... |
|
writing layers.26.attention_norm.weight... |
|
writing layers.27.attention_norm.weight... |
|
writing layers.28.attention_norm.weight... |
|
writing layers.29.attention_norm.weight... |
|
writing layers.30.attention_norm.weight... |
|
writing layers.31.attention_norm.weight... |
|
writing layers.0.attention.wq.weight... |
|
writing layers.1.attention.wq.weight... |
|
writing layers.2.attention.wq.weight... |
|
writing layers.3.attention.wq.weight... |
|
writing layers.4.attention.wq.weight... |
|
writing layers.5.attention.wq.weight... |
|
writing layers.6.attention.wq.weight... |
|
writing layers.7.attention.wq.weight... |
|
writing layers.8.attention.wq.weight... |
|
writing layers.9.attention.wq.weight... |
|
writing layers.10.attention.wq.weight... |
|
writing layers.11.attention.wq.weight... |
|
writing layers.12.attention.wq.weight... |
|
writing layers.13.attention.wq.weight... |
|
writing layers.14.attention.wq.weight... |
|
writing layers.15.attention.wq.weight... |
|
writing layers.16.attention.wq.weight... |
|
writing layers.17.attention.wq.weight... |
|
writing layers.18.attention.wq.weight... |
|
writing layers.19.attention.wq.weight... |
|
writing layers.20.attention.wq.weight... |
|
writing layers.21.attention.wq.weight... |
|
writing layers.22.attention.wq.weight... |
|
writing layers.23.attention.wq.weight... |
|
writing layers.24.attention.wq.weight... |
|
writing layers.25.attention.wq.weight... |
|
writing layers.26.attention.wq.weight... |
|
writing layers.27.attention.wq.weight... |
|
writing layers.28.attention.wq.weight... |
|
writing layers.29.attention.wq.weight... |
|
writing layers.30.attention.wq.weight... |
|
writing layers.31.attention.wq.weight... |
|
writing layers.0.attention.wk.weight... |
|
writing layers.1.attention.wk.weight... |
|
writing layers.2.attention.wk.weight... |
|
writing layers.3.attention.wk.weight... |
|
writing layers.4.attention.wk.weight... |
|
writing layers.5.attention.wk.weight... |
|
writing layers.6.attention.wk.weight... |
|
writing layers.7.attention.wk.weight... |
|
writing layers.8.attention.wk.weight... |
|
writing layers.9.attention.wk.weight... |
|
writing layers.10.attention.wk.weight... |
|
writing layers.11.attention.wk.weight... |
|
writing layers.12.attention.wk.weight... |
|
writing layers.13.attention.wk.weight... |
|
writing layers.14.attention.wk.weight... |
|
writing layers.15.attention.wk.weight... |
|
writing layers.16.attention.wk.weight... |
|
writing layers.17.attention.wk.weight... |
|
writing layers.18.attention.wk.weight... |
|
writing layers.19.attention.wk.weight... |
|
writing layers.20.attention.wk.weight... |
|
writing layers.21.attention.wk.weight... |
|
writing layers.22.attention.wk.weight... |
|
writing layers.23.attention.wk.weight... |
|
writing layers.24.attention.wk.weight... |
|
writing layers.25.attention.wk.weight... |
|
writing layers.26.attention.wk.weight... |
|
writing layers.27.attention.wk.weight... |
|
writing layers.28.attention.wk.weight... |
|
writing layers.29.attention.wk.weight... |
|
writing layers.30.attention.wk.weight... |
|
writing layers.31.attention.wk.weight... |
|
writing layers.0.attention.wv.weight... |
|
writing layers.1.attention.wv.weight... |
|
writing layers.2.attention.wv.weight... |
|
writing layers.3.attention.wv.weight... |
|
writing layers.4.attention.wv.weight... |
|
writing layers.5.attention.wv.weight... |
|
writing layers.6.attention.wv.weight... |
|
writing layers.7.attention.wv.weight... |
|
writing layers.8.attention.wv.weight... |
|
writing layers.9.attention.wv.weight... |
|
writing layers.10.attention.wv.weight... |
|
writing layers.11.attention.wv.weight... |
|
writing layers.12.attention.wv.weight... |
|
writing layers.13.attention.wv.weight... |
|
writing layers.14.attention.wv.weight... |
|
writing layers.15.attention.wv.weight... |
|
writing layers.16.attention.wv.weight... |
|
writing layers.17.attention.wv.weight... |
|
writing layers.18.attention.wv.weight... |
|
writing layers.19.attention.wv.weight... |
|
writing layers.20.attention.wv.weight... |
|
writing layers.21.attention.wv.weight... |
|
writing layers.22.attention.wv.weight... |
|
writing layers.23.attention.wv.weight... |
|
writing layers.24.attention.wv.weight... |
|
writing layers.25.attention.wv.weight... |
|
writing layers.26.attention.wv.weight... |
|
writing layers.27.attention.wv.weight... |
|
writing layers.28.attention.wv.weight... |
|
writing layers.29.attention.wv.weight... |
|
writing layers.30.attention.wv.weight... |
|
writing layers.31.attention.wv.weight... |
|
writing layers.0.attention.wo.weight... |
|
writing layers.1.attention.wo.weight... |
|
writing layers.2.attention.wo.weight... |
|
writing layers.3.attention.wo.weight... |
|
writing layers.4.attention.wo.weight... |
|
writing layers.5.attention.wo.weight... |
|
writing layers.6.attention.wo.weight... |
|
writing layers.7.attention.wo.weight... |
|
writing layers.8.attention.wo.weight... |
|
writing layers.9.attention.wo.weight... |
|
writing layers.10.attention.wo.weight... |
|
writing layers.11.attention.wo.weight... |
|
writing layers.12.attention.wo.weight... |
|
writing layers.13.attention.wo.weight... |
|
writing layers.14.attention.wo.weight... |
|
writing layers.15.attention.wo.weight... |
|
writing layers.16.attention.wo.weight... |
|
writing layers.17.attention.wo.weight... |
|
writing layers.18.attention.wo.weight... |
|
writing layers.19.attention.wo.weight... |
|
writing layers.20.attention.wo.weight... |
|
writing layers.21.attention.wo.weight... |
|
writing layers.22.attention.wo.weight... |
|
writing layers.23.attention.wo.weight... |
|
writing layers.24.attention.wo.weight... |
|
writing layers.25.attention.wo.weight... |
|
writing layers.26.attention.wo.weight... |
|
writing layers.27.attention.wo.weight... |
|
writing layers.28.attention.wo.weight... |
|
writing layers.29.attention.wo.weight... |
|
writing layers.30.attention.wo.weight... |
|
writing layers.31.attention.wo.weight... |
|
writing layers.0.ffn_norm.weight... |
|
writing layers.1.ffn_norm.weight... |
|
writing layers.2.ffn_norm.weight... |
|
writing layers.3.ffn_norm.weight... |
|
writing layers.4.ffn_norm.weight... |
|
writing layers.5.ffn_norm.weight... |
|
writing layers.6.ffn_norm.weight... |
|
writing layers.7.ffn_norm.weight... |
|
writing layers.8.ffn_norm.weight... |
|
writing layers.9.ffn_norm.weight... |
|
writing layers.10.ffn_norm.weight... |
|
writing layers.11.ffn_norm.weight... |
|
writing layers.12.ffn_norm.weight... |
|
writing layers.13.ffn_norm.weight... |
|
writing layers.14.ffn_norm.weight... |
|
writing layers.15.ffn_norm.weight... |
|
writing layers.16.ffn_norm.weight... |
|
writing layers.17.ffn_norm.weight... |
|
writing layers.18.ffn_norm.weight... |
|
writing layers.19.ffn_norm.weight... |
|
writing layers.20.ffn_norm.weight... |
|
writing layers.21.ffn_norm.weight... |
|
writing layers.22.ffn_norm.weight... |
|
writing layers.23.ffn_norm.weight... |
|
writing layers.24.ffn_norm.weight... |
|
writing layers.25.ffn_norm.weight... |
|
writing layers.26.ffn_norm.weight... |
|
writing layers.27.ffn_norm.weight... |
|
writing layers.28.ffn_norm.weight... |
|
writing layers.29.ffn_norm.weight... |
|
writing layers.30.ffn_norm.weight... |
|
writing layers.31.ffn_norm.weight... |
|
writing layers.0.feed_forward.w1.weight... |
|
writing layers.1.feed_forward.w1.weight... |
|
writing layers.2.feed_forward.w1.weight... |
|
writing layers.3.feed_forward.w1.weight... |
|
writing layers.4.feed_forward.w1.weight... |
|
writing layers.5.feed_forward.w1.weight... |
|
writing layers.6.feed_forward.w1.weight... |
|
writing layers.7.feed_forward.w1.weight... |
|
writing layers.8.feed_forward.w1.weight... |
|
writing layers.9.feed_forward.w1.weight... |
|
writing layers.10.feed_forward.w1.weight... |
|
writing layers.11.feed_forward.w1.weight... |
|
writing layers.12.feed_forward.w1.weight... |
|
writing layers.13.feed_forward.w1.weight... |
|
writing layers.14.feed_forward.w1.weight... |
|
writing layers.15.feed_forward.w1.weight... |
|
writing layers.16.feed_forward.w1.weight... |
|
writing layers.17.feed_forward.w1.weight... |
|
writing layers.18.feed_forward.w1.weight... |
|
writing layers.19.feed_forward.w1.weight... |
|
writing layers.20.feed_forward.w1.weight... |
|
writing layers.21.feed_forward.w1.weight... |
|
writing layers.22.feed_forward.w1.weight... |
|
writing layers.23.feed_forward.w1.weight... |
|
writing layers.24.feed_forward.w1.weight... |
|
writing layers.25.feed_forward.w1.weight... |
|
writing layers.26.feed_forward.w1.weight... |
|
writing layers.27.feed_forward.w1.weight... |
|
writing layers.28.feed_forward.w1.weight... |
|
writing layers.29.feed_forward.w1.weight... |
|
writing layers.30.feed_forward.w1.weight... |
|
writing layers.31.feed_forward.w1.weight... |
|
writing layers.0.feed_forward.w2.weight... |
|
writing layers.1.feed_forward.w2.weight... |
|
writing layers.2.feed_forward.w2.weight... |
|
writing layers.3.feed_forward.w2.weight... |
|
writing layers.4.feed_forward.w2.weight... |
|
writing layers.5.feed_forward.w2.weight... |
|
writing layers.6.feed_forward.w2.weight... |
|
writing layers.7.feed_forward.w2.weight... |
|
writing layers.8.feed_forward.w2.weight... |
|
writing layers.9.feed_forward.w2.weight... |
|
writing layers.10.feed_forward.w2.weight... |
|
writing layers.11.feed_forward.w2.weight... |
|
writing layers.12.feed_forward.w2.weight... |
|
writing layers.13.feed_forward.w2.weight... |
|
writing layers.14.feed_forward.w2.weight... |
|
writing layers.15.feed_forward.w2.weight... |
|
writing layers.16.feed_forward.w2.weight... |
|
writing layers.17.feed_forward.w2.weight... |
|
writing layers.18.feed_forward.w2.weight... |
|
writing layers.19.feed_forward.w2.weight... |
|
writing layers.20.feed_forward.w2.weight... |
|
writing layers.21.feed_forward.w2.weight... |
|
writing layers.22.feed_forward.w2.weight... |
|
writing layers.23.feed_forward.w2.weight... |
|
writing layers.24.feed_forward.w2.weight... |
|
writing layers.25.feed_forward.w2.weight... |
|
writing layers.26.feed_forward.w2.weight... |
|
writing layers.27.feed_forward.w2.weight... |
|
writing layers.28.feed_forward.w2.weight... |
|
writing layers.29.feed_forward.w2.weight... |
|
writing layers.30.feed_forward.w2.weight... |
|
writing layers.31.feed_forward.w2.weight... |
|
writing layers.0.feed_forward.w3.weight... |
|
writing layers.1.feed_forward.w3.weight... |
|
writing layers.2.feed_forward.w3.weight... |
|
writing layers.3.feed_forward.w3.weight... |
|
writing layers.4.feed_forward.w3.weight... |
|
writing layers.5.feed_forward.w3.weight... |
|
writing layers.6.feed_forward.w3.weight... |
|
writing layers.7.feed_forward.w3.weight... |
|
writing layers.8.feed_forward.w3.weight... |
|
writing layers.9.feed_forward.w3.weight... |
|
writing layers.10.feed_forward.w3.weight... |
|
writing layers.11.feed_forward.w3.weight... |
|
writing layers.12.feed_forward.w3.weight... |
|
writing layers.13.feed_forward.w3.weight... |
|
writing layers.14.feed_forward.w3.weight... |
|
writing layers.15.feed_forward.w3.weight... |
|
writing layers.16.feed_forward.w3.weight... |
|
writing layers.17.feed_forward.w3.weight... |
|
writing layers.18.feed_forward.w3.weight... |
|
writing layers.19.feed_forward.w3.weight... |
|
writing layers.20.feed_forward.w3.weight... |
|
writing layers.21.feed_forward.w3.weight... |
|
writing layers.22.feed_forward.w3.weight... |
|
writing layers.23.feed_forward.w3.weight... |
|
writing layers.24.feed_forward.w3.weight... |
|
writing layers.25.feed_forward.w3.weight... |
|
writing layers.26.feed_forward.w3.weight... |
|
writing layers.27.feed_forward.w3.weight... |
|
writing layers.28.feed_forward.w3.weight... |
|
writing layers.29.feed_forward.w3.weight... |
|
writing layers.30.feed_forward.w3.weight... |
|
writing layers.31.feed_forward.w3.weight... |
|
writing norm.weight... |
|
writing freqs_cos... |
|
writing freqs_sin... |
|
writing output.weight... |
|
wrote bins/llama2_7b_chat.bin |
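
For reference, the exporter writes a flat binary: a small integer header (the config printed above, with the real vocab size filled in, since Meta's params.json stores `vocab_size: -1`) followed by the fp32 tensors in exactly the order shown in the log. The sketch below assumes the legacy 7-int32 llama2.c header layout (dim, hidden_dim, n_layers, n_heads, n_kv_heads, vocab_size, max_seq_len) with a negated vocab_size as the "classifier not shared with tok_embeddings" flag; the field names and layout are my assumption from memory of that format, not pulled from the repo, so treat it as a sanity check rather than the exporter's own code.

```python
# Sketch: peek at the header of the exported .bin.
# Assumes the legacy llama2.c "version 0" layout: 7 little-endian int32s,
# then fp32 tensors in the order the exporter printed above. (Assumption.)
import struct

def read_header(path="bins/llama2_7b_chat.bin"):
    with open(path, "rb") as f:
        dim, hidden_dim, n_layers, n_heads, n_kv_heads, vocab_size, seq_len = \
            struct.unpack("<7i", f.read(7 * 4))
    # In this layout a negative vocab_size flags that the output classifier
    # is NOT shared with tok_embeddings (matching shared_weights=0 below).
    shared_weights = vocab_size > 0
    return dict(dim=dim, hidden_dim=hidden_dim, n_layers=n_layers,
                n_heads=n_heads, n_kv_heads=n_kv_heads,
                vocab_size=abs(vocab_size), seq_len=seq_len,
                shared_weights=shared_weights)

if __name__ == "__main__":
    # For llama2_7b_chat this should report dim=4096, n_layers=32,
    # n_heads=32, vocab_size=32000, shared_weights=False.
    print(read_header())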
|
|
|
# quantize |
|
temp/llama2.c $ ./quantize ../../downloads/llama2.c/bins/llama2_7b_chat.bin |
|
vocab size = 32000 shared_weights=0 |
|
Model file size = 25706MB |
|
------------------------ |
|
token_embedding_table layer_size=131072000 |
|
l=0 min=-0.133789 max=0.259766 scale=0.001543 |
|
------------------------ |
|
rms_att_weight layer_size=4096 |
|
l=0 min=-0.000618 max=0.792969 scale=0.003112 |
|
l=1 min=0.000021 max=0.486328 scale=0.001907 |
|
l=2 min=0.000002 max=0.492188 scale=0.001930 |
|
l=3 min=-0.000156 max=0.589844 scale=0.002314 |
|
l=4 min=0.000060 max=0.628906 scale=0.002466 |
|
l=5 min=-0.000004 max=0.570312 scale=0.002237 |
|
l=6 min=0.000095 max=0.652344 scale=0.002558 |
|
l=7 min=0.000043 max=0.671875 scale=0.002635 |
|
l=8 min=-0.000028 max=0.707031 scale=0.002773 |
|
l=9 min=0.000007 max=0.707031 scale=0.002773 |
|
l=10 min=-0.000006 max=0.734375 scale=0.002880 |
|
l=11 min=0.000001 max=0.785156 scale=0.003079 |
|
l=12 min=0.000078 max=0.777344 scale=0.003048 |
|
l=13 min=-0.000062 max=0.710938 scale=0.002788 |
|
l=14 min=-0.000017 max=0.785156 scale=0.003079 |
|
l=15 min=0.025513 max=0.839844 scale=0.003193 |
|
l=16 min=0.022461 max=0.828125 scale=0.003159 |
|
l=17 min=0.000095 max=0.785156 scale=0.003079 |
|
l=18 min=0.000050 max=0.867188 scale=0.003401 |
|
l=19 min=-0.000074 max=0.843750 scale=0.003309 |
|
l=20 min=-0.000017 max=0.847656 scale=0.003324 |
|
l=21 min=-0.000028 max=0.859375 scale=0.003370 |
|
l=22 min=-0.000089 max=1.015625 scale=0.003983 |
|
l=23 min=-0.000008 max=0.992188 scale=0.003891 |
|
l=24 min=0.000128 max=1.015625 scale=0.003982 |
|
l=25 min=-0.000007 max=1.007812 scale=0.003952 |
|
l=26 min=-0.000165 max=1.164062 scale=0.004566 |
|
l=27 min=-0.000039 max=1.187500 scale=0.004657 |
|
l=28 min=-0.000074 max=1.140625 scale=0.004473 |
|
l=29 min=0.010864 max=1.148438 scale=0.004461 |
|
l=30 min=0.000046 max=1.117188 scale=0.004381 |
|
l=31 min=0.082031 max=1.109375 scale=0.004029 |
|
------------------------ |
|
wq layer_size=16777216 |
|
l=0 min=-0.773438 max=0.718750 scale=0.005852 |
|
l=1 min=-0.402344 max=0.468750 scale=0.003416 |
|
l=2 min=-0.746094 max=1.085938 scale=0.007184 |
|
l=3 min=-0.683594 max=0.773438 scale=0.005714 |
|
l=4 min=-0.671875 max=0.632812 scale=0.005116 |
|
l=5 min=-0.589844 max=0.636719 scale=0.004810 |
|
l=6 min=-0.574219 max=0.613281 scale=0.004657 |
|
l=7 min=-0.562500 max=0.753906 scale=0.005162 |
|
l=8 min=-0.554688 max=0.466797 scale=0.004006 |
|
l=9 min=-0.773438 max=0.437500 scale=0.004749 |
|
l=10 min=-0.460938 max=0.460938 scale=0.003615 |
|
l=11 min=-0.416016 max=0.601562 scale=0.003991 |
|
l=12 min=-0.458984 max=0.523438 scale=0.003853 |
|
l=13 min=-0.417969 max=0.679688 scale=0.004305 |
|
l=14 min=-0.566406 max=0.539062 scale=0.004335 |
|
l=15 min=-0.558594 max=0.578125 scale=0.004458 |
|
l=16 min=-0.992188 max=0.753906 scale=0.006847 |
|
l=17 min=-0.792969 max=0.937500 scale=0.006786 |
|
l=18 min=-0.769531 max=0.675781 scale=0.005668 |
|
l=19 min=-0.714844 max=0.664062 scale=0.005407 |
|
l=20 min=-1.046875 max=0.878906 scale=0.007552 |
|
l=21 min=-0.707031 max=0.933594 scale=0.006434 |
|
l=22 min=-0.582031 max=0.871094 scale=0.005699 |
|
l=23 min=-0.808594 max=0.683594 scale=0.005852 |
|
l=24 min=-0.593750 max=1.023438 scale=0.006342 |
|
l=25 min=-0.636719 max=0.574219 scale=0.004749 |
|
l=26 min=-0.859375 max=0.687500 scale=0.006066 |
|
l=27 min=-0.785156 max=0.703125 scale=0.005836 |
|
l=28 min=-0.769531 max=0.621094 scale=0.005453 |
|
l=29 min=-0.734375 max=0.808594 scale=0.006051 |
|
l=30 min=-0.742188 max=0.808594 scale=0.006081 |
|
l=31 min=-0.664062 max=0.414062 scale=0.004228 |
|
------------------------ |
|
wk layer_size=16777216 |
|
l=0 min=-0.816406 max=0.699219 scale=0.005944 |
|
l=1 min=-0.515625 max=0.566406 scale=0.004243 |
|
l=2 min=-0.341797 max=0.371094 scale=0.002796 |
|
l=3 min=-0.406250 max=0.320312 scale=0.002849 |
|
l=4 min=-0.306641 max=0.507812 scale=0.003194 |
|
l=5 min=-0.330078 max=0.330078 scale=0.002589 |
|
l=6 min=-0.263672 max=0.304688 scale=0.002229 |
|
l=7 min=-0.251953 max=0.314453 scale=0.002221 |
|
l=8 min=-0.306641 max=0.291016 scale=0.002344 |
|
l=9 min=-0.351562 max=0.263672 scale=0.002413 |
|
l=10 min=-0.267578 max=0.289062 scale=0.002183 |
|
l=11 min=-0.275391 max=0.269531 scale=0.002137 |
|
l=12 min=-0.296875 max=0.255859 scale=0.002168 |
|
l=13 min=-0.261719 max=0.296875 scale=0.002191 |
|
l=14 min=-0.312500 max=0.250000 scale=0.002206 |
|
l=15 min=-0.251953 max=0.255859 scale=0.001991 |
|
l=16 min=-0.287109 max=0.271484 scale=0.002191 |
|
l=17 min=-0.302734 max=0.277344 scale=0.002275 |
|
l=18 min=-0.310547 max=0.308594 scale=0.002428 |
|
l=19 min=-0.296875 max=0.285156 scale=0.002282 |
|
l=20 min=-0.326172 max=0.324219 scale=0.002551 |
|
l=21 min=-0.306641 max=0.369141 scale=0.002650 |
|
l=22 min=-0.314453 max=0.283203 scale=0.002344 |
|
l=23 min=-0.287109 max=0.308594 scale=0.002336 |
|
l=24 min=-0.328125 max=0.294922 scale=0.002443 |
|
l=25 min=-0.279297 max=0.361328 scale=0.002512 |
|
l=26 min=-0.341797 max=0.353516 scale=0.002727 |
|
l=27 min=-0.277344 max=0.287109 scale=0.002214 |
|
l=28 min=-0.365234 max=0.322266 scale=0.002696 |
|
l=29 min=-0.365234 max=0.326172 scale=0.002711 |
|
l=30 min=-0.343750 max=0.365234 scale=0.002780 |
|
l=31 min=-0.396484 max=0.400391 scale=0.003125 |
|
------------------------ |
|
wv layer_size=16777216 |
|
l=0 min=-0.123047 max=0.131836 scale=0.001000 |
|
l=1 min=-0.125000 max=0.105957 scale=0.000906 |
|
l=2 min=-0.126953 max=0.146484 scale=0.001072 |
|
l=3 min=-0.114746 max=0.137695 scale=0.000990 |
|
l=4 min=-0.139648 max=0.119141 scale=0.001015 |
|
l=5 min=-0.263672 max=0.165039 scale=0.001681 |
|
l=6 min=-0.285156 max=0.275391 scale=0.002198 |
|
l=7 min=-0.223633 max=0.221680 scale=0.001746 |
|
l=8 min=-0.206055 max=0.188477 scale=0.001547 |
|
l=9 min=-0.242188 max=0.203125 scale=0.001746 |
|
l=10 min=-0.202148 max=0.257812 scale=0.001804 |
|
l=11 min=-0.296875 max=0.265625 scale=0.002206 |
|
l=12 min=-0.187500 max=0.249023 scale=0.001712 |
|
l=13 min=-0.228516 max=0.220703 scale=0.001762 |
|
l=14 min=-0.173828 max=0.332031 scale=0.001984 |
|
l=15 min=-0.199219 max=0.225586 scale=0.001666 |
|
l=16 min=-0.203125 max=0.241211 scale=0.001742 |
|
l=17 min=-0.193359 max=0.211914 scale=0.001589 |
|
l=18 min=-0.224609 max=0.200195 scale=0.001666 |
|
l=19 min=-0.136719 max=0.222656 scale=0.001409 |
|
l=20 min=-0.162109 max=0.125977 scale=0.001130 |
|
l=21 min=-0.170898 max=0.178711 scale=0.001371 |
|
l=22 min=-0.151367 max=0.100098 scale=0.000986 |
|
l=23 min=-0.151367 max=0.136719 scale=0.001130 |
|
l=24 min=-0.112305 max=0.181641 scale=0.001153 |
|
l=25 min=-0.125000 max=0.125000 scale=0.000980 |
|
l=26 min=-0.178711 max=0.196289 scale=0.001471 |
|
l=27 min=-0.118652 max=0.127930 scale=0.000967 |
|
l=28 min=-0.163086 max=0.167969 scale=0.001298 |
|
l=29 min=-0.130859 max=0.122559 scale=0.000994 |
|
l=30 min=-0.431641 max=0.498047 scale=0.003646 |
|
l=31 min=-0.386719 max=0.378906 scale=0.003002 |
|
------------------------ |
|
wo layer_size=16777216 |
|
l=0 min=-0.447266 max=0.412109 scale=0.003370 |
|
l=1 min=-0.490234 max=0.585938 scale=0.004220 |
|
l=2 min=-0.601562 max=0.515625 scale=0.004381 |
|
l=3 min=-0.535156 max=0.466797 scale=0.003929 |
|
l=4 min=-0.617188 max=0.523438 scale=0.004473 |
|
l=5 min=-0.523438 max=0.443359 scale=0.003791 |
|
l=6 min=-0.597656 max=0.531250 scale=0.004427 |
|
l=7 min=-0.554688 max=0.402344 scale=0.003753 |
|
l=8 min=-0.605469 max=0.750000 scale=0.005316 |
|
l=9 min=-0.539062 max=0.503906 scale=0.004090 |
|
l=10 min=-0.410156 max=0.400391 scale=0.003179 |
|
l=11 min=-0.535156 max=0.570312 scale=0.004335 |
|
l=12 min=-0.511719 max=0.496094 scale=0.003952 |
|
l=13 min=-0.460938 max=0.503906 scale=0.003784 |
|
l=14 min=-0.589844 max=0.535156 scale=0.004412 |
|
l=15 min=-0.480469 max=0.380859 scale=0.003378 |
|
l=16 min=-0.423828 max=0.392578 scale=0.003202 |
|
l=17 min=-0.486328 max=0.494141 scale=0.003845 |
|
l=18 min=-0.554688 max=0.562500 scale=0.004381 |
|
l=19 min=-0.597656 max=0.671875 scale=0.004979 |
|
l=20 min=-0.574219 max=0.566406 scale=0.004473 |
|
l=21 min=-0.546875 max=0.535156 scale=0.004243 |
|
l=22 min=-0.734375 max=0.769531 scale=0.005898 |
|
l=23 min=-0.482422 max=0.480469 scale=0.003776 |
|
l=24 min=-0.726562 max=0.648438 scale=0.005392 |
|
l=25 min=-0.437500 max=0.648438 scale=0.004259 |
|
l=26 min=-0.683594 max=0.625000 scale=0.005132 |
|
l=27 min=-0.554688 max=0.621094 scale=0.004611 |
|
l=28 min=-0.777344 max=0.388672 scale=0.004573 |
|
l=29 min=-0.574219 max=0.613281 scale=0.004657 |
|
l=30 min=-0.515625 max=0.621094 scale=0.004458 |
|
l=31 min=-1.257812 max=1.296875 scale=0.010018 |
|
------------------------ |
|
rms_ffn_weight layer_size=4096 |
|
l=0 min=-0.000267 max=0.218750 scale=0.000859 |
|
l=1 min=0.000063 max=0.195312 scale=0.000766 |
|
l=2 min=-0.000166 max=0.150391 scale=0.000590 |
|
l=3 min=-0.000036 max=0.184570 scale=0.000724 |
|
l=4 min=0.022583 max=0.197266 scale=0.000685 |
|
l=5 min=0.020508 max=0.213867 scale=0.000758 |
|
l=6 min=0.037598 max=0.227539 scale=0.000745 |
|
l=7 min=0.046631 max=0.243164 scale=0.000771 |
|
l=8 min=0.051758 max=0.251953 scale=0.000785 |
|
l=9 min=0.053467 max=0.253906 scale=0.000786 |
|
l=10 min=0.056641 max=0.259766 scale=0.000797 |
|
l=11 min=0.052734 max=0.263672 scale=0.000827 |
|
l=12 min=0.062500 max=0.273438 scale=0.000827 |
|
l=13 min=0.062256 max=0.281250 scale=0.000859 |
|
l=14 min=0.058594 max=0.289062 scale=0.000904 |
|
l=15 min=0.068848 max=0.302734 scale=0.000917 |
|
l=16 min=0.065918 max=0.320312 scale=0.000998 |
|
l=17 min=0.051514 max=0.339844 scale=0.001131 |
|
l=18 min=0.059082 max=0.355469 scale=0.001162 |
|
l=19 min=0.064941 max=0.367188 scale=0.001185 |
|
l=20 min=0.051514 max=0.380859 scale=0.001292 |
|
l=21 min=0.049805 max=0.392578 scale=0.001344 |
|
l=22 min=0.046875 max=0.402344 scale=0.001394 |
|
l=23 min=0.051514 max=0.414062 scale=0.001422 |
|
l=24 min=0.050049 max=0.429688 scale=0.001489 |
|
l=25 min=0.065918 max=0.443359 scale=0.001480 |
|
l=26 min=0.050049 max=0.455078 scale=0.001588 |
|
l=27 min=0.046387 max=0.470703 scale=0.001664 |
|
l=28 min=0.061279 max=0.492188 scale=0.001690 |
|
l=29 min=0.059326 max=0.554688 scale=0.001943 |
|
l=30 min=0.087891 max=0.609375 scale=0.002045 |
|
l=31 min=0.112793 max=0.675781 scale=0.002208 |
|
------------------------ |
|
w1 layer_size=45088768 |
|
l=0 min=-0.882812 max=0.718750 scale=0.006281 |
|
l=1 min=-1.085938 max=0.964844 scale=0.008042 |
|
l=2 min=-0.523438 max=0.414062 scale=0.003676 |
|
l=3 min=-0.445312 max=0.570312 scale=0.003983 |
|
l=4 min=-0.515625 max=0.445312 scale=0.003768 |
|
l=5 min=-0.439453 max=0.332031 scale=0.003025 |
|
l=6 min=-0.367188 max=0.414062 scale=0.003064 |
|
l=7 min=-0.449219 max=0.421875 scale=0.003416 |
|
l=8 min=-0.386719 max=0.314453 scale=0.002750 |
|
l=9 min=-0.355469 max=0.373047 scale=0.002857 |
|
l=10 min=-0.523438 max=0.369141 scale=0.003500 |
|
l=11 min=-0.400391 max=0.310547 scale=0.002788 |
|
l=12 min=-0.335938 max=0.353516 scale=0.002704 |
|
l=13 min=-0.314453 max=0.365234 scale=0.002665 |
|
l=14 min=-0.316406 max=0.339844 scale=0.002574 |
|
l=15 min=-0.306641 max=0.427734 scale=0.002880 |
|
l=16 min=-0.468750 max=0.367188 scale=0.003278 |
|
l=17 min=-0.402344 max=0.341797 scale=0.002918 |
|
l=18 min=-0.357422 max=0.367188 scale=0.002842 |
|
l=19 min=-0.386719 max=0.349609 scale=0.002888 |
|
l=20 min=-0.318359 max=0.285156 scale=0.002367 |
|
l=21 min=-0.294922 max=0.285156 scale=0.002275 |
|
l=22 min=-0.339844 max=0.369141 scale=0.002780 |
|
l=23 min=-0.357422 max=0.412109 scale=0.003018 |
|
l=24 min=-0.343750 max=0.300781 scale=0.002528 |
|
l=25 min=-0.550781 max=0.449219 scale=0.003922 |
|
l=26 min=-0.408203 max=0.298828 scale=0.002773 |
|
l=27 min=-0.632812 max=0.298828 scale=0.003653 |
|
l=28 min=-0.388672 max=0.283203 scale=0.002635 |
|
l=29 min=-0.375000 max=0.460938 scale=0.003278 |
|
l=30 min=-0.789062 max=0.703125 scale=0.005852 |
|
l=31 min=-0.375000 max=0.306641 scale=0.002673 |
|
------------------------ |
|
w2 layer_size=45088768 |
|
l=0 min=-0.507812 max=0.523438 scale=0.004044 |
|
l=1 min=-1.414062 max=1.546875 scale=0.011612 |
|
l=2 min=-0.664062 max=0.890625 scale=0.006097 |
|
l=3 min=-0.660156 max=0.675781 scale=0.005239 |
|
l=4 min=-0.964844 max=0.800781 scale=0.006924 |
|
l=5 min=-0.726562 max=0.554688 scale=0.005025 |
|
l=6 min=-0.914062 max=0.718750 scale=0.006403 |
|
l=7 min=-0.937500 max=0.703125 scale=0.006434 |
|
l=8 min=-0.792969 max=0.753906 scale=0.006066 |
|
l=9 min=-0.660156 max=0.632812 scale=0.005070 |
|
l=10 min=-0.738281 max=1.015625 scale=0.006878 |
|
l=11 min=-0.757812 max=0.636719 scale=0.005469 |
|
l=12 min=-0.585938 max=0.644531 scale=0.004825 |
|
l=13 min=-0.609375 max=1.062500 scale=0.006556 |
|
l=14 min=-0.570312 max=0.656250 scale=0.004810 |
|
l=15 min=-1.054688 max=0.625000 scale=0.006587 |
|
l=16 min=-0.765625 max=0.472656 scale=0.004856 |
|
l=17 min=-1.132812 max=0.542969 scale=0.006572 |
|
l=18 min=-1.093750 max=0.648438 scale=0.006832 |
|
l=19 min=-0.957031 max=0.675781 scale=0.006403 |
|
l=20 min=-0.550781 max=1.093750 scale=0.006449 |
|
l=21 min=-0.707031 max=0.730469 scale=0.005637 |
|
l=22 min=-0.388672 max=0.324219 scale=0.002796 |
|
l=23 min=-0.597656 max=1.304688 scale=0.007460 |
|
l=24 min=-0.656250 max=0.718750 scale=0.005392 |
|
l=25 min=-1.015625 max=0.314453 scale=0.005216 |
|
l=26 min=-0.392578 max=0.574219 scale=0.003791 |
|
l=27 min=-0.757812 max=0.498047 scale=0.004925 |
|
l=28 min=-0.992188 max=0.562500 scale=0.006097 |
|
l=29 min=-0.976562 max=0.492188 scale=0.005760 |
|
l=30 min=-1.296875 max=1.039062 scale=0.009161 |
|
l=31 min=-1.390625 max=1.765625 scale=0.012377 |
|
------------------------ |
|
w3 layer_size=45088768 |
|
l=0 min=-0.337891 max=0.310547 scale=0.002543 |
|
l=1 min=-0.400391 max=0.289062 scale=0.002704 |
|
l=2 min=-0.310547 max=0.453125 scale=0.002995 |
|
l=3 min=-0.447266 max=0.498047 scale=0.003707 |
|
l=4 min=-0.472656 max=0.351562 scale=0.003232 |
|
l=5 min=-0.484375 max=0.417969 scale=0.003539 |
|
l=6 min=-0.333984 max=0.562500 scale=0.003516 |
|
l=7 min=-0.333984 max=0.351562 scale=0.002688 |
|
l=8 min=-0.433594 max=0.644531 scale=0.004228 |
|
l=9 min=-0.376953 max=0.605469 scale=0.003853 |
|
l=10 min=-0.388672 max=0.233398 scale=0.002439 |
|
l=11 min=-0.419922 max=0.414062 scale=0.003271 |
|
l=12 min=-0.238281 max=0.242188 scale=0.001884 |
|
l=13 min=-0.427734 max=0.412109 scale=0.003294 |
|
l=14 min=-0.218750 max=0.470703 scale=0.002704 |
|
l=15 min=-0.300781 max=0.435547 scale=0.002888 |
|
l=16 min=-0.457031 max=0.361328 scale=0.003209 |
|
l=17 min=-0.271484 max=0.335938 scale=0.002382 |
|
l=18 min=-0.167969 max=0.324219 scale=0.001930 |
|
l=19 min=-0.566406 max=0.482422 scale=0.004113 |
|
l=20 min=-0.312500 max=0.304688 scale=0.002420 |
|
l=21 min=-0.229492 max=0.196289 scale=0.001670 |
|
l=22 min=-0.170898 max=0.269531 scale=0.001727 |
|
l=23 min=-0.296875 max=0.224609 scale=0.002045 |
|
l=24 min=-0.194336 max=0.196289 scale=0.001532 |
|
l=25 min=-0.240234 max=0.251953 scale=0.001930 |
|
l=26 min=-0.363281 max=0.257812 scale=0.002436 |
|
l=27 min=-0.335938 max=0.219727 scale=0.002179 |
|
l=28 min=-0.367188 max=0.273438 scale=0.002512 |
|
l=29 min=-0.503906 max=0.542969 scale=0.004105 |
|
l=30 min=-0.769531 max=0.707031 scale=0.005790 |
|
l=31 min=-0.753906 max=0.574219 scale=0.005208 |
|
------------------------ |
|
rms_final_weight layer_size=4096 |
|
l=0 min=0.003387 max=2.843750 scale=0.011139 |
|
model converted and saved to data.bin |
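
The min/max/scale lines read like plain per-tensor asymmetric 8-bit quantization: each tensor's range is split into 256 levels, so scale = (max - min) / 255. For the first token_embedding_table entry, (0.259766 - (-0.133789)) / 255 ≈ 0.001543, which matches the log. Below is a minimal sketch of that scheme assuming uint8 storage; it is my reading of the output, not the quantize tool's actual source.

```python
# Sketch of the asymmetric 8-bit scheme suggested by the min/max/scale lines.
# Assumption: scale = (max - min) / 255, values stored as uint8 offsets from min.
import numpy as np

def quantize_u8(w: np.ndarray):
    lo, hi = float(w.min()), float(w.max())
    scale = (hi - lo) / 255.0 if hi > lo else 1.0  # guard constant tensors
    q = np.round((w - lo) / scale).astype(np.uint8)
    return q, lo, scale

def dequantize_u8(q: np.ndarray, lo: float, scale: float) -> np.ndarray:
    return lo + q.astype(np.float32) * scale

# Reproduce the first token_embedding_table line from the log:
# min=-0.133789 max=0.259766 -> scale ≈ 0.001543
print((0.259766 - (-0.133789)) / 255.0)
```

At one byte per weight (plus a per-tensor min and scale), the 25706 MB fp32 model would shrink to roughly a quarter of its size.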