---
license: apache-2.0
base_model: xaviviro/FLAMA-0.1-3B
language:
- ca
- en
model_creator: xaviviro
model_name: FLAMA-0.1-3B
prompt_template: '<|im_start|>user\n{instruction}<|im_end|>\n<|im_start|>assistant\n'
quantized_by: xaviviro
---

# FLAMA: A 3B ChatML Model in Catalan. Version 0.1

FLAMA is the first small 3B model for Catalan. It is the result of fine-tuning [open_llama_3b_v2](/openlm-research/open_llama_3b_v2) on the [OpenAssistant](/xaviviro/oasst1_ca) instructions, automatically translated into Catalan with [Helsinki-NLP](/Helsinki-NLP) resources and converted to ChatML format.
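
To make the card easier to try out, here is a minimal loading sketch (not part of the original card), assuming the repository ships standard `transformers` weights and a tokenizer:

```
# Minimal sketch: load FLAMA with Hugging Face transformers.
# Assumption: the repo provides standard weights and a tokenizer.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "xaviviro/FLAMA-0.1-3B"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)
```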

# Prompt Template

FLAMA uses ChatML as its prompt template:

```
<|im_start|>user
Qui va ser Isaac Newton?<|im_end|>
<|im_start|>assistant\n
```
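
Continuing the loading sketch above, a hedged example of filling the ChatML template from the front matter and generating a reply (the sampling settings are illustrative assumptions, not values from the card):

```
# Fill the card's prompt_template and generate.
# Assumptions: `tokenizer` and `model` come from the loading sketch above;
# max_new_tokens and temperature are illustrative, not from the card.
prompt_template = "<|im_start|>user\n{instruction}<|im_end|>\n<|im_start|>assistant\n"
prompt = prompt_template.format(instruction="Qui va ser Isaac Newton?")

inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(
    **inputs,
    max_new_tokens=256,
    do_sample=True,
    temperature=0.7,
)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```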

## References

```
@software{xaviviro2023flama,
  author = {xaviviro},
  title = {FLAMA: Model 3B ChatML en Català. Versió 0.1},
  month = dec,
  year = {2023},
  url = {https://huggingface.co/xaviviro/FLAMA-0.1-3B}
}
```

```
@software{openlm2023openllama,
  author = {Geng, Xinyang and Liu, Hao},
  title = {OpenLLaMA: An Open Reproduction of LLaMA},
  month = may,
  year = {2023},
  url = {https://github.com/openlm-research/open_llama}
}
```

```
@software{together2023redpajama,
  author = {Together Computer},
  title = {RedPajama-Data: An Open Source Recipe to Reproduce LLaMA training dataset},
  month = apr,
  year = {2023},
  url = {https://github.com/togethercomputer/RedPajama-Data}
}
```

```
@article{touvron2023llama,
  title = {Llama: Open and efficient foundation language models},
  author = {Touvron, Hugo and Lavril, Thibaut and Izacard, Gautier and Martinet, Xavier and Lachaux, Marie-Anne and Lacroix, Timoth{\'e}e and Rozi{\`e}re, Baptiste and Goyal, Naman and Hambro, Eric and Azhar, Faisal and others},
  journal = {arXiv preprint arXiv:2302.13971},
  year = {2023}
}
```