Update README.md
Browse files
README.md
CHANGED
@@ -4,6 +4,9 @@
|
|
4 |
- [lassl](https://github.com/lassl/lassl) 오픈소스 프로젝트를 활용하여 학습하였습니다.
|
5 |
- 사전학습만 진행된 모델이므로 아래와 같이 UL2의 denoising을 확인해보실 수 있습니다.
|
6 |
```py
|
|
|
|
|
|
|
7 |
for prefix_token in ("[NLU]","[NLG]","[S2S]"):
|
8 |
input_string = f"{prefix_token}어떤 아파트는 호가가 [new_id_27]는 등 경기 침체로 인한 [new_id_26]를 확인할 수 있었습니다.</s>"
|
9 |
inputs = tokenizer(input_string, return_tensors="pt", add_special_tokens=False)
|
|
|
4 |
- [lassl](https://github.com/lassl/lassl) 오픈소스 프로젝트를 활용하여 학습하였습니다.
|
5 |
- 사전학습만 진행된 모델이므로 아래와 같이 UL2의 denoising을 확인해보실 수 있습니다.
|
6 |
```py
|
7 |
+
model = T5ForConditionalGeneration.from_pretrained("DaehanKim/KoUL2")
|
8 |
+
tokenizer = AutoTokenizer.from_pretrained("DaehanKim/KoUL2")
|
9 |
+
|
10 |
for prefix_token in ("[NLU]","[NLG]","[S2S]"):
|
11 |
input_string = f"{prefix_token}어떤 아파트는 호가가 [new_id_27]는 등 경기 침체로 인한 [new_id_26]를 확인할 수 있었습니다.</s>"
|
12 |
inputs = tokenizer(input_string, return_tensors="pt", add_special_tokens=False)
|