mihaimasala
committed on
Update README.md
Browse files
README.md
CHANGED
@@ -5,7 +5,7 @@ language:
|
|
5 |
base_model:
|
6 |
- OpenLLM-Ro/RoLlama2-7b-Base
|
7 |
model-index:
|
8 |
-
- name: OpenLLM-Ro/RoLlama2-7b-Instruct
|
9 |
results:
|
10 |
- task:
|
11 |
type: text-generation
|
@@ -557,6 +557,7 @@ print(tokenizer.decode(outputs[0]))
|
|
557 |
## Academic Benchmarks
|
558 |
|
559 |
|
|
|
560 |
<table>
|
561 |
<tbody>
|
562 |
<tr>
|
@@ -573,12 +574,14 @@ print(tokenizer.decode(outputs[0]))
|
|
573 |
<td>Llama-2-7b-chat</td><td><center>36.84</center></td><td><center>37.03</center></td><td><center>33.80</center></td><td><center>55.87</center></td><td><center>45.36</center></td><td><center>4.90</center></td><td><center>44.09</center></td>
|
574 |
</tr>
|
575 |
<tr>
|
576 |
-
<td><em>RoLlama2-7b-Instruct</em></td><td><center><em><strong>45.71</strong></em></center></td><td><center><em
|
|
|
|
|
|
|
577 |
</tr>
|
578 |
</tbody>
|
579 |
</table>
|
580 |
|
581 |
-
|
582 |
## Downstream tasks
|
583 |
|
584 |
|
@@ -611,7 +614,10 @@ print(tokenizer.decode(outputs[0]))
|
|
611 |
<td>Llama-2-7b-chat</td><td><center>87.78</center></td><td><center>52.81</center></td><td><center>97.27</center></td><td><center>82.02</center></td><td><center>15.55</center></td><td><center><strong>28.53</strong></center></td><td><center>19.99</center></td><td><center>31.48</center></td>
|
612 |
</tr>
|
613 |
<tr>
|
614 |
-
<td><em>RoLlama2-7b-Instruct</em></td><td><center><em
|
|
|
|
|
|
|
615 |
</tr>
|
616 |
</tbody>
|
617 |
</table>
|
@@ -646,11 +652,15 @@ print(tokenizer.decode(outputs[0]))
|
|
646 |
<td>Llama-2-7b-chat</td><td><center>32.35</center></td><td><center>54.00</center></td><td><center><strong>60.34</strong></center></td><td><center><strong>75.98</strong></center></td><td><center>32.56</center></td><td><center>31.99</center></td><td><center>74.08</center></td><td><center>72.64</center></td>
|
647 |
</tr>
|
648 |
<tr>
|
649 |
-
<td><em>RoLlama2-7b-Instruct</em></td><td><center><em
|
|
|
|
|
|
|
650 |
</tr>
|
651 |
</tbody>
|
652 |
</table>
|
653 |
|
|
|
654 |
## Romanian MT-Bench
|
655 |
|
656 |
<table>
|
@@ -666,7 +676,10 @@ print(tokenizer.decode(outputs[0]))
|
|
666 |
<td>Llama-2-7b-chat</td><td><center>1.08</center></td><td><center>1.44</center></td><td><center>0.73</center></td><td><center>45/160</center></td>
|
667 |
</tr>
|
668 |
<tr>
|
669 |
-
<td><em>RoLlama2-7b-Instruct</em></td><td><center><em
|
|
|
|
|
|
|
670 |
</tr>
|
671 |
</tbody>
|
672 |
</table>
|
@@ -686,19 +699,26 @@ print(tokenizer.decode(outputs[0]))
|
|
686 |
<td>Llama-2-7b-chat</td><td><center>1.21</center></td><td><center>33/100</center></td>
|
687 |
</tr>
|
688 |
<tr>
|
689 |
-
<td><em>RoLlama2-7b-Instruct</em></td><td><center><em
|
|
|
|
|
|
|
690 |
</tr>
|
691 |
</tbody>
|
692 |
</table>
|
693 |
|
694 |
|
695 |
|
|
|
|
|
696 |
## RoLlama2 Model Family
|
697 |
|
698 |
| Model | Link |
|
699 |
|--------------------|:--------:|
|
700 |
-
|RoLlama2-7b-Base | [link](https://huggingface.co/OpenLLM-Ro/RoLlama2-7b-Base) |
|
701 |
-
|*RoLlama2-7b-Instruct
|
|
|
|
|
702 |
|
703 |
|
704 |
|
|
|
5 |
base_model:
|
6 |
- OpenLLM-Ro/RoLlama2-7b-Base
|
7 |
model-index:
|
8 |
+
- name: OpenLLM-Ro/RoLlama2-7b-Instruct-2024-05-14
|
9 |
results:
|
10 |
- task:
|
11 |
type: text-generation
|
|
|
557 |
## Academic Benchmarks
|
558 |
|
559 |
|
560 |
+
|
561 |
<table>
|
562 |
<tbody>
|
563 |
<tr>
|
|
|
574 |
<td>Llama-2-7b-chat</td><td><center>36.84</center></td><td><center>37.03</center></td><td><center>33.80</center></td><td><center>55.87</center></td><td><center>45.36</center></td><td><center>4.90</center></td><td><center>44.09</center></td>
|
575 |
</tr>
|
576 |
<tr>
|
577 |
+
<td><em>RoLlama2-7b-Instruct-2024-05-14</em></td><td><center><em><strong>45.71</strong></em></center></td><td><center><em>43.66</em></center></td><td><center><em>39.70</em></center></td><td><center><em><strong>70.34</strong></em></center></td><td><center><em>57.36</em></center></td><td><center><em><strong>18.78</strong></em></center></td><td><center><em>44.44</em></center></td>
|
578 |
+
</tr>
|
579 |
+
<tr>
|
580 |
+
<td>RoLlama2-7b-Instruct-2024-10-09</td><td><center>44.50</center></td><td><center><strong>44.73</strong></center></td><td><center><strong>40.39</strong></center></td><td><center>63.67</center></td><td><center><strong>59.12</strong></center></td><td><center>13.29</center></td><td><center><strong>45.78</strong></center></td>
|
581 |
</tr>
|
582 |
</tbody>
|
583 |
</table>
|
584 |
|
|
|
585 |
## Downstream tasks
|
586 |
|
587 |
|
|
|
614 |
<td>Llama-2-7b-chat</td><td><center>87.78</center></td><td><center>52.81</center></td><td><center>97.27</center></td><td><center>82.02</center></td><td><center>15.55</center></td><td><center><strong>28.53</strong></center></td><td><center>19.99</center></td><td><center>31.48</center></td>
|
615 |
</tr>
|
616 |
<tr>
|
617 |
+
<td><em>RoLlama2-7b-Instruct-2024-05-14</em></td><td><center><em>97.48</em></center></td><td><center><em><strong>65.26</strong></em></center></td><td><center><em><strong>98.83</strong></em></center></td><td><center><em><strong>87.28</strong></em></center></td><td><center><em><strong>27.38</strong></em></center></td><td><center><em>10.32</em></center></td><td><center><em>27.59</em></center></td><td><center><em><strong>40.13</strong></em></center></td>
|
618 |
+
</tr>
|
619 |
+
<tr>
|
620 |
+
<td>RoLlama2-7b-Instruct-2024-10-09</td><td><center><strong>97.66</strong></center></td><td><center>62.41</center></td><td><center>97.97</center></td><td><center>60.89</center></td><td><center>27.13</center></td><td><center>19.39</center></td><td><center><strong>27.63</strong></center></td><td><center>39.75</center></td>
|
621 |
</tr>
|
622 |
</tbody>
|
623 |
</table>
|
|
|
652 |
<td>Llama-2-7b-chat</td><td><center>32.35</center></td><td><center>54.00</center></td><td><center><strong>60.34</strong></center></td><td><center><strong>75.98</strong></center></td><td><center>32.56</center></td><td><center>31.99</center></td><td><center>74.08</center></td><td><center>72.64</center></td>
|
653 |
</tr>
|
654 |
<tr>
|
655 |
+
<td><em>RoLlama2-7b-Instruct-2024-05-14</em></td><td><center><em>44.52</em></center></td><td><center><em>64.75</em></center></td><td><center><em>54.96</em></center></td><td><center><em>70.20</em></center></td><td><center><em><strong>65.50</strong></em></center></td><td><center><em><strong>67.79</strong></em></center></td><td><center><em>84.44</em></center></td><td><center><em>84.76</em></center></td>
|
656 |
+
</tr>
|
657 |
+
<tr>
|
658 |
+
<td>RoLlama2-7b-Instruct-2024-10-09</td><td><center><strong>45.71</strong></center></td><td><center><strong>65.08</strong></center></td><td><center>59.24</center></td><td><center>74.25</center></td><td><center>59.69</center></td><td><center>57.16</center></td><td><center><strong>84.66</strong></center></td><td><center><strong>85.07</strong></center></td>
|
659 |
</tr>
|
660 |
</tbody>
|
661 |
</table>
|
662 |
|
663 |
+
|
664 |
## Romanian MT-Bench
|
665 |
|
666 |
<table>
|
|
|
676 |
<td>Llama-2-7b-chat</td><td><center>1.08</center></td><td><center>1.44</center></td><td><center>0.73</center></td><td><center>45/160</center></td>
|
677 |
</tr>
|
678 |
<tr>
|
679 |
+
<td><em>RoLlama2-7b-Instruct-2024-05-14</em></td><td><center><em>3.86</em></center></td><td><center><em>4.67</em></center></td><td><center><em>3.04</em></center></td><td><center><em><strong>160/160</strong></em></center></td>
|
680 |
+
</tr>
|
681 |
+
<tr>
|
682 |
+
<td>RoLlama2-7b-Instruct-2024-10-09</td><td><center><strong>4.43</strong></center></td><td><center><strong>4.92</strong></center></td><td><center><strong>3.94</strong></center></td><td><center><strong>160/160</strong></center></td>
|
683 |
</tr>
|
684 |
</tbody>
|
685 |
</table>
|
|
|
699 |
<td>Llama-2-7b-chat</td><td><center>1.21</center></td><td><center>33/100</center></td>
|
700 |
</tr>
|
701 |
<tr>
|
702 |
+
<td><em>RoLlama2-7b-Instruct-2024-05-14</em></td><td><center><em>3.77</em></center></td><td><center><em><strong>100/100</strong></em></center></td>
|
703 |
+
</tr>
|
704 |
+
<tr>
|
705 |
+
<td>RoLlama2-7b-Instruct-2024-10-09</td><td><center><strong>4.08</strong></center></td><td><center><strong>100/100</strong></center></td>
|
706 |
</tr>
|
707 |
</tbody>
|
708 |
</table>
|
709 |
|
710 |
|
711 |
|
712 |
+
|
713 |
+
|
714 |
## RoLlama2 Model Family
|
715 |
|
716 |
| Model | Link |
|
717 |
|--------------------|:--------:|
|
718 |
+
|RoLlama2-7b-Base-2024-05-14 | [link](https://huggingface.co/OpenLLM-Ro/RoLlama2-7b-Base-2024-05-14) |
|
719 |
+
|*RoLlama2-7b-Instruct-2024-05-14* | [link](https://huggingface.co/OpenLLM-Ro/RoLlama2-7b-Instruct-2024-05-14) |
|
720 |
+
|RoLlama2-7b-Instruct-2024-10-09| [link](https://huggingface.co/OpenLLM-Ro/RoLlama2-7b-Instruct-2024-10-09) |
|
721 |
+
|RoLlama2-7b-Instruct-DPO-2024-10-09| [link](https://huggingface.co/OpenLLM-Ro/RoLlama2-7b-Instruct-DPO-2024-10-09) |
|
722 |
|
723 |
|
724 |
|