# korean-gpt-neox-125M

## Model Details

### Model Description

## Uses

### Direct Use
# Example: generate a Korean text continuation with the
# cateto/korean-gpt-neox-125M causal language model.
from transformers import AutoModelForCausalLM, AutoTokenizer

# Both tokenizer and model weights are fetched from the Hugging Face hub
# on first use (network access required).
tokenizer = AutoTokenizer.from_pretrained("cateto/korean-gpt-neox-125M")
model = AutoModelForCausalLM.from_pretrained("cateto/korean-gpt-neox-125M")

# Korean prompt (NOTE(review): the text appears mojibake-encoded in this
# file — verify the card renders the intended Korean characters).
user_input = "์ฐ๋ฆฌ๋ ์์ผ๋ก ๋๋์ ๋ฏธ๋๋ฅผ"

# Encode the prompt as a PyTorch tensor of token ids, shape (1, seq_len).
input_ids = tokenizer.encode(user_input, return_tensors="pt")

# Beam-search decoding; repetition_penalty and no_repeat_ngram_size both
# discourage the degenerate repeated phrases small LMs tend to produce.
output_ids = model.generate(
    input_ids,
    num_beams=4,
    repetition_penalty=1.5,
    no_repeat_ngram_size=3,
)

# Decode the first (and only) generated sequence back to text.
bot_output = tokenizer.decode(output_ids.tolist()[0], skip_special_tokens=True)

# Fix: the original line was `print(f"์ถ๋ ฅ ## ", bot_output)` — an f-string
# with no placeholder (ruff F541); interpolate the generated text instead.
print(f"์ถ๋ ฅ ## {bot_output}")