---
license: cc-by-sa-4.0
task_categories:
- question-answering
- summarization
- text-generation
language:
- zh
- en
size_categories:
- 10K<n<100K
---

Training dataset for Language Models for Taiwanese Culture (Taiwan-LLaMa).
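
If you use the Hugging Face `datasets` library, loading this data typically looks like the minimal sketch below. The repository id in the snippet is a placeholder assumption, not the actual id; replace it with the id shown on this dataset's Hub page.

```python
# Minimal loading sketch using the Hugging Face `datasets` library.
# NOTE: the repository id below is a hypothetical placeholder;
# replace it with the id shown on this dataset's Hub page.
from datasets import load_dataset

DATASET_ID = "your-username/taiwan-culture-instructions"  # placeholder assumption

dataset = load_dataset(DATASET_ID, split="train")

# The card metadata lists zh/en text and 10K < n < 100K examples.
print(len(dataset))
print(dataset[0])
```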

## Citation

Please cite this repository if you use its data or code.

```bibtex
@inproceedings{lin-chen-2023-llm,
    title = "{LLM}-Eval: Unified Multi-Dimensional Automatic Evaluation for Open-Domain Conversations with Large Language Models",
    author = "Lin, Yen-Ting and Chen, Yun-Nung",
    booktitle = "Proceedings of the 5th Workshop on NLP for Conversational AI (NLP4ConvAI 2023)",
    month = jul,
    year = "2023",
    address = "Toronto, Canada",
    publisher = "Association for Computational Linguistics",
    url = "https://aclanthology.org/2023.nlp4convai-1.5",
    pages = "47--58"
}

@misc{taiwanllama,
    author = {Lin, Yen-Ting and Chen, Yun-Nung},
    title = {Taiwanese-Aligned Language Models based on Meta-Llama2},
    year = {2023},
    url = {https://github.com/adamlin120/Taiwan-LLaMa},
    note = {Code and models available at https://github.com/adamlin120/Taiwan-LLaMa}
}
```