# NOTE(review): the three lines above this app's code were Hugging Face Spaces
# page chrome from the scrape ("Spaces:" / "Build error" x2) — not source code.
# The Space reported a build error at capture time.
import gradio as gr | |
from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM, StoppingCriteriaList | |
import os | |
import torch | |
# ---------------------------------------------------------------------------
# Demo configuration: Gradio theme/title, Hub model id, and seed prompts.
# The Korean strings below are preserved byte-for-byte (mojibake in this copy).
# ---------------------------------------------------------------------------
theme = "darkgrass"                          # Gradio theme name
title = "GPT-NeoX(Korean) Demo"              # page title shown in the UI
model_name = "EleutherAI/gpt-neox-ko-1.3b"   # Hugging Face Hub model id
description = "GPT-NeoX νκ΅μ΄ λͺ¨λΈμ μμ°νλ λ°λͺ¨νμ΄μ§ μ λλ€."
# article = "<p style='text-align: center'><a href='https://github.com/kingoflolz/mesh-transformer-jax' target='_blank'>GPT-J-6B: A 6 Billion Parameter Autoregressive Language Model</a></p>"

# Example prompts offered under the input box (each entry is one input field).
examples = [
    ["μΈκ°μ²λΌ μκ°νκ³ , νλνλ 'μ§λ₯'μ ν΅ν΄"],
    ["μ§λ¬Έ: νκ°μλ₯Ό κ°μ₯ μλ―Έμκ² λ³΄λΌμ μλ λ°©λ²μ 무μμΌκΉμ? \nλ΅λ³:"],
    ["2040λ λ―Έκ΅μ, "],
]
# Load tokenizer and model from the Hub, authenticating with the TOKEN secret
# (raises KeyError if the environment variable is unset — fail fast on purpose).
# Fix: reuse the `model_name` constant instead of repeating the model-id
# literal twice, so the id is defined in exactly one place.
# NOTE(review): `use_auth_token` is deprecated in newer transformers releases
# in favor of `token=` — kept here to match the pinned environment; confirm
# before upgrading.
tokenizer = AutoTokenizer.from_pretrained(model_name, use_auth_token=os.environ['TOKEN'])
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    use_auth_token=os.environ['TOKEN'],
    # Wire the special-token ids explicitly so generate() pads/stops correctly.
    pad_token_id=tokenizer.pad_token_id,
    eos_token_id=tokenizer.eos_token_id,
    bos_token_id=tokenizer.bos_token_id,
)
model.eval()  # inference only — disable dropout etc.
# Kept for parity with the original module namespace; predict() below runs
# generate() directly and does not use this pipeline.
pipe = pipeline('text-generation', model=model, tokenizer=tokenizer)
def predict(text: str) -> str:
    """Generate a sampled continuation of *text* with the loaded model.

    Tokenizes the prompt, samples up to 64 new tokens (temperature 0.8),
    and returns the full decoded sequence (prompt + continuation),
    including any special tokens.

    Fix: the original built
    ``StoppingCriteriaList([tokenizer.encode('<|endoftext|>')])`` — invalid
    (the list expects StoppingCriteria instances, not token ids) and never
    passed to ``generate`` — so it has been removed; behavior is unchanged.
    """
    with torch.no_grad():  # inference only — no autograd bookkeeping
        input_ids = tokenizer(text, return_tensors="pt").input_ids
        gen_ids = model.generate(
            input_ids,
            do_sample=True,
            temperature=0.8,
            max_new_tokens=64,
        )
    return tokenizer.batch_decode(gen_ids)[0]
# Wire the prediction function into a plain text-in / text-out Gradio UI
# with the example prompts listed under the input box, then start the server.
iface = gr.Interface(fn=predict, inputs='text', outputs='text', examples=examples)
iface.launch()
# print(generated) # print: μΈκ°μ²λΌ μκ°νκ³ , νλνλ 'μ§λ₯'μ ν΅ν΄ μΈλ₯κ° μ΄μ κΉμ§ νμ§ λͺ»νλ λ¬Έμ μ ν΄λ΅μ μ°Ύμ μ μμ κ²μ΄λ€. κ³ΌνκΈ°μ μ΄ κ³ λλ‘ λ°λ¬ν 21μΈκΈ°λ₯Ό μ΄μκ° μ°λ¦¬ μμ΄λ€μκ² κ°μ₯ νμν κ²μ μ¬κ³ λ ₯ νλ ¨μ΄λ€. μ¬κ³ λ ₯ νλ ¨μ ν΅ν΄, μΈμ | |