mihaimasala
committed on
Commit
•
68c5044
1
Parent(s):
246fbc2
Update README.md
Browse files
README.md
CHANGED
@@ -54,8 +54,8 @@ Use the code below to get started with the model.
|
|
54 |
```python
|
55 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
56 |
|
57 |
-
tokenizer = AutoTokenizer.from_pretrained("OpenLLM-Ro/RoLlama2-7b-Chat
|
58 |
-
model = AutoModelForCausalLM.from_pretrained("OpenLLM-Ro/RoLlama2-7b-Chat
|
59 |
|
60 |
instruction = "Care este cel mai înalt vârf muntos din România?"
|
61 |
chat = [
|
@@ -74,18 +74,18 @@ print(tokenizer.decode(outputs[0]))
|
|
74 |
| Model | Average | ARC | MMLU |Winogrande|HellaSwag | GSM8k |TruthfulQA|
|
75 |
|--------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|
|
76 |
| Llama-2-7b | 35.65 | 33.85 | 30.93 | 56.43 | 46.98 | 1.37 | 44.36 |
|
77 |
-
| RoLlama2-7b-Base
|
78 |
| Llama-2-7b-chat | 35.58 | 34.92 | 32.37 | 54.26 | 44.52 | 2.05 | 45.38 |
|
79 |
-
|RoLlama2-7b-Instruct
|
80 |
-
|*RoLlama2-7b-Chat
|
81 |
|
82 |
## MT-Bench
|
83 |
|
84 |
| Model | Average | 1st turn | 2nd turn |
|
85 |
|--------------------|:--------:|:--------:|:--------:|
|
86 |
| Llama-2-7b-chat | 1.70 | 2.00 | 1.41 |
|
87 |
-
|RoLlama2-7b-Instruct
|
88 |
-
|*RoLlama2-7b-Chat
|
89 |
|
90 |
|
91 |
|
@@ -93,9 +93,9 @@ print(tokenizer.decode(outputs[0]))
|
|
93 |
|
94 |
| Model | Link |
|
95 |
|--------------------|:--------:|
|
96 |
-
|RoLlama2-7b-Base
|
97 |
-
|RoLlama2-7b-Instruct
|
98 |
-
|*RoLlama2-7b-Chat
|
99 |
|
100 |
|
101 |
<!--
|
|
|
54 |
```python
|
55 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
56 |
|
57 |
+
tokenizer = AutoTokenizer.from_pretrained("OpenLLM-Ro/RoLlama2-7b-Chat")
|
58 |
+
model = AutoModelForCausalLM.from_pretrained("OpenLLM-Ro/RoLlama2-7b-Chat")
|
59 |
|
60 |
instruction = "Care este cel mai înalt vârf muntos din România?"
|
61 |
chat = [
|
|
|
74 |
| Model | Average | ARC | MMLU |Winogrande|HellaSwag | GSM8k |TruthfulQA|
|
75 |
|--------------------|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|:--------:|
|
76 |
| Llama-2-7b | 35.65 | 33.85 | 30.93 | 56.43 | 46.98 | 1.37 | 44.36 |
|
77 |
+
| RoLlama2-7b-Base | 38.32 | 35.83 | 30.47 | 60.16 | 55.52 | 2.17 | 45.78 |
|
78 |
| Llama-2-7b-chat | 35.58 | 34.92 | 32.37 | 54.26 | 44.52 | 2.05 | 45.38 |
|
79 |
+
|RoLlama2-7b-Instruct|**44.42**|**40.36**|**37.41**|**69.58** | 55.64 |**17.59**| 45.96 |
|
80 |
+
|*RoLlama2-7b-Chat* | *42.65* | *38.29* | *35.27* | *65.25* |***56.45***| *12.84* | ***47.79***|
|
81 |
|
82 |
## MT-Bench
|
83 |
|
84 |
| Model | Average | 1st turn | 2nd turn |
|
85 |
|--------------------|:--------:|:--------:|:--------:|
|
86 |
| Llama-2-7b-chat | 1.70 | 2.00 | 1.41 |
|
87 |
+
|RoLlama2-7b-Instruct| **4.31**|**5.66**| 2.95 |
|
88 |
+
|*RoLlama2-7b-Chat* | *3.91* | *4.25* | ***3.57*** |
|
89 |
|
90 |
|
91 |
|
|
|
93 |
|
94 |
| Model | Link |
|
95 |
|--------------------|:--------:|
|
96 |
+
|RoLlama2-7b-Base | [link](https://huggingface.co/OpenLLM-Ro/RoLlama2-7b-Base) |
|
97 |
+
|RoLlama2-7b-Instruct| [link](https://huggingface.co/OpenLLM-Ro/RoLlama2-7b-Instruct) |
|
98 |
+
|*RoLlama2-7b-Chat* | [link](https://huggingface.co/OpenLLM-Ro/RoLlama2-7b-Chat) |
|
99 |
|
100 |
|
101 |
<!--
|