mihaimasala committed on
Commit
9586229
·
verified ·
1 Parent(s): e1bb977

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +6 -6
README.md CHANGED
@@ -540,8 +540,8 @@ Use the code below to get started with the model.
540
  ```python
541
  from transformers import AutoTokenizer, AutoModelForCausalLM
542
 
543
- tokenizer = AutoTokenizer.from_pretrained("OpenLLM-Ro/RoLlama3.1-8b")
544
- model = AutoModelForCausalLM.from_pretrained("OpenLLM-Ro/RoLlama3.1-8b")
545
 
546
  instruction = "Ce jocuri de societate pot juca cu prietenii mei?"
547
  chat = [
@@ -611,13 +611,13 @@ print(tokenizer.decode(outputs[0]))
611
  <td><center><strong>RO-EN<br>(Bleu)</strong></center>
612
  </tr>
613
  <tr>
614
- <td>Llama-3.1-8B-Instruct</td><td><center><strong>95.74</strong></center></td><td><center>59.49</center></td><td><center><strong>98.57</strong></center></td><td><center>82.41</center></td><td><center>19.01</center></td><td><center><strong>27.77</strong></center></td><td><center><strong>29.02</strong></center></td><td><center>39.80</center></td>
615
  </tr>
616
  <tr>
617
  <td><em>RoLlama3.1-8b-Instruct-2024-10-09</em></td><td><center><em>94.56</em></center></td><td><center><em><strong>60.10</strong></em></center></td><td><center><em>95.12</em></center></td><td><center><em><strong>87.53</strong></em></center></td><td><center><em><strong>21.88</strong></em></center></td><td><center><em>23.99</em></center></td><td><center><em>28.27</em></center></td><td><center><em><strong>40.44</strong></em></center></td>
618
  </tr>
619
  <tr>
620
- <td>RoLlama3.1-8b-Instruct-DPO-2024-10-09</td><td><center>-</center></td><td><center>-</center></td><td><center>-</center></td><td><center>-</center></td><td><center>-</center></td><td><center>-</center></td><td><center>-</center></td><td><center>-</center></td>
621
  </tr>
622
  </tbody>
623
  </table>
@@ -652,10 +652,10 @@ print(tokenizer.decode(outputs[0]))
652
  <td>Llama-3.1-8B-Instruct</td><td><center><strong>44.96</strong></center></td><td><center><strong>64.45</strong></center></td><td><center><strong>69.50</strong></center></td><td><center><strong>84.31</strong></center></td><td><center>72.11</center></td><td><center>71.64</center></td><td><center>84.59</center></td><td><center>84.96</center></td>
653
  </tr>
654
  <tr>
655
- <td><em>RoLlama3.1-8b-Instruct-2024-10-09</em></td><td><center><em>13.59</em></center></td><td><center><em>23.56</em></center></td><td><center><em>49.41</em></center></td><td><center><em>62.93</em></center></td><td><center><em><strong>75.89</strong></em></center></td><td><center><em><strong>76.00</strong></em></center></td><td><center><em><strong>86.86</strong></em></center></td><td><center><em><strong>87.05</strong></em></center></td>
656
  </tr>
657
  <tr>
658
- <td>RoLlama3.1-8b-Instruct-DPO-2024-10-09</td><td><center>-</center></td><td><center>-</center></td><td><center>-</center></td><td><center>-</center></td><td><center>-</center></td><td><center>-</center></td><td><center>-</center></td><td><center>-</center></td>
659
  </tr>
660
  </tbody>
661
  </table>
 
540
  ```python
541
  from transformers import AutoTokenizer, AutoModelForCausalLM
542
 
543
+ tokenizer = AutoTokenizer.from_pretrained("OpenLLM-Ro/RoLlama3.1-8b-Instruct")
544
+ model = AutoModelForCausalLM.from_pretrained("OpenLLM-Ro/RoLlama3.1-8b-Instruct")
545
 
546
  instruction = "Ce jocuri de societate pot juca cu prietenii mei?"
547
  chat = [
 
611
  <td><center><strong>RO-EN<br>(Bleu)</strong></center>
612
  </tr>
613
  <tr>
614
+ <td>Llama-3.1-8B-Instruct</td><td><center>95.74</center></td><td><center>59.49</center></td><td><center><strong>98.57</strong></center></td><td><center>82.41</center></td><td><center>19.01</center></td><td><center><strong>27.77</strong></center></td><td><center><strong>29.02</strong></center></td><td><center>39.80</center></td>
615
  </tr>
616
  <tr>
617
  <td><em>RoLlama3.1-8b-Instruct-2024-10-09</em></td><td><center><em>94.56</em></center></td><td><center><em><strong>60.10</strong></em></center></td><td><center><em>95.12</em></center></td><td><center><em><strong>87.53</strong></em></center></td><td><center><em><strong>21.88</strong></em></center></td><td><center><em>23.99</em></center></td><td><center><em>28.27</em></center></td><td><center><em><strong>40.44</strong></em></center></td>
618
  </tr>
619
  <tr>
620
+ <td>RoLlama3.1-8b-Instruct-DPO-2024-10-09</td><td><center><strong>96.10</strong></center></td><td><center>55.37</center></td><td><center>-</center></td><td><center>-</center></td><td><center>21.29</center></td><td><center>21.86</center></td><td><center>-</center></td><td><center>-</center></td>
621
  </tr>
622
  </tbody>
623
  </table>
 
652
  <td>Llama-3.1-8B-Instruct</td><td><center><strong>44.96</strong></center></td><td><center><strong>64.45</strong></center></td><td><center><strong>69.50</strong></center></td><td><center><strong>84.31</strong></center></td><td><center>72.11</center></td><td><center>71.64</center></td><td><center>84.59</center></td><td><center>84.96</center></td>
653
  </tr>
654
  <tr>
655
+ <td><em>RoLlama3.1-8b-Instruct-2024-10-09</em></td><td><center><em>13.59</em></center></td><td><center><em>23.56</em></center></td><td><center><em>49.41</em></center></td><td><center><em>62.93</em></center></td><td><center><em>75.89</em></center></td><td><center><em>76.00</em></center></td><td><center><em><strong>86.86</strong></em></center></td><td><center><em><strong>87.05</strong></em></center></td>
656
  </tr>
657
  <tr>
658
+ <td>RoLlama3.1-8b-Instruct-DPO-2024-10-09</td><td><center>21.58</center></td><td><center>36.54</center></td><td><center>-</center></td><td><center>-</center></td><td><center><strong>78.01</strong></center></td><td><center><strong>77.98</strong></center></td><td><center>-</center></td><td><center>-</center></td>
659
  </tr>
660
  </tbody>
661
  </table>