# Install dependencies at runtime. `pip.main` is not a supported pip API (it was
# removed from the top-level module in pip 10), so invoke pip through the interpreter.
import subprocess
import sys

subprocess.check_call([sys.executable, "-m", "pip", "install", "torch", "transformers"])
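
# On Hugging Face Spaces the same dependencies are more commonly declared in a
# requirements.txt file instead of being installed at runtime; a minimal sketch
# (unpinned versions, an assumption rather than this Space's actual file):
#   gradio
#   torch
#   transformers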
import re

import torch
import gradio as gr
from transformers import AutoTokenizer, AutoModelForSequenceClassification

def load_model(model_name):
    # classification model (downloaded from the Hugging Face Hub on first use)
    model = AutoModelForSequenceClassification.from_pretrained(model_name)

    # matching tokenizer
    tokenizer = AutoTokenizer.from_pretrained(model_name)

    return model, tokenizer

def inference(prompt_inputs):
    model_name = "Unggi/feedback_prize_kor"

    model, tokenizer = load_model(
        model_name=model_name
    )
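    # NOTE: the model and tokenizer are reloaded (from the local Hub cache after the
    # first download) on every request; loading them once at module level is usually
    # cheaper for a long-running demo.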

    # preprocessing: collapse newlines and tabs into spaces
    prompt_inputs = prompt_inputs.replace('\n', ' ')
    prompt_inputs = prompt_inputs.replace('\t', ' ')

    # split the prompt into sentence-like units at closing punctuation
    split_list = re.split('[.?!]', prompt_inputs)

    prompt_list = []
    for prompt in split_list:
        prompt = prompt.strip()
        if prompt != "":
            prompt_list.append(prompt)

    class_id_list = []
    for prompt in prompt_list:
        inputs = tokenizer(
            prompt,
            return_tensors="pt"
        )

        # classify the sentence and map the highest-scoring logit to its label name
        with torch.no_grad():
            logits = model(**inputs).logits

        predicted_class_id = logits.argmax().item()
        class_id = model.config.id2label[predicted_class_id]

        class_id_list.append(class_id)

    outputs = []
    for p, c_id in zip(prompt_list, class_id_list):
        outputs.append(p + '\t' + '=>' + '\t' + '<' + c_id + '>')

    outputs = '\n'.join(outputs)

    return outputs
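
# Illustrative call (the label names below are hypothetical; the real ones come from
# model.config.id2label of the Unggi/feedback_prize_kor checkpoint):
#   inference("주장이 명확하다. 근거도 충분하다!")
#   -> "주장이 명확하다\t=>\t<Claim>\n근거도 충분하다\t=>\t<Evidence>"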

demo = gr.Interface(
    fn=inference,
    inputs="text",
    outputs="text",  # the string returned by inference
    examples=[
"μμ λΆν° μ¬μ±μ μ¬νμ°Έμ¬μ΄λμ΄λ μμλ₯Ό μ§μμ μ΄λ©° ν μλ νλμΌλ‘ νλν΄λκ°μΌλ©° νμ¬κΉμ§ κ³μ μμ±νλ±μ μ€μμ±μ΄ μ£Όλͺ©λ°κ³ μλ€. μμμ λ§νλ―μ΄ μμ΄ μ΄λ¦μ μ±μ μλ²μ§μ μ±μΌλ‘ μ§λ κ²μ μ΄μ λΆλͺ¨ λͺ¨λλ₯Ό λ°νμΌλ‘ μ΄λ¦μ μ§λ μκ³μ λλ₯Ό λμ νκΈ°λ νκ³ μ§μ₯μμμ λ¨λ λͺ¨λ μΉμ§κ³Ό μκΈ λ±μ λλ±νκ² λΆλ°°νλ©° λ¨λ κ° μ ν΄μ Έ μλ μ§μ λ€μ΄ μ±μ°¨λ³μ κ²½κ³κ° 무λμ§κ³Ό λμμ λꡬλ ν μ μλλ‘ νλ κ² μ΄μΈμ λ§μ μμ±νλ±μ μλ€μ΄ λλλκ³ μλ€. μμ½ν΄μ μ±μ°¨λ³μ΄ κ°κ°μΈμ ꡬμνκ³ μ§λ¨μ κ±Έλ¦Όλμ΄ λμ΄ μ¬λλ§λ€ μμ μ μ£Όμ₯κ³Ό νλμ λν μ νμ΄ λΆκ°νΌνλ©° μ¬νμ λ°μ μ μ ν΄νλ κ²λΏλ§ μλλΌ μ μ§κ΅ λλ λ°λμ§ν κ΅κ°λ‘ λμκ°λλ° λ°©ν΄κ° λλ€λ κ²μΈλ° μ΄λ₯Ό 극볡νκΈ° μν΄μλ μ±μ°¨λ³μ λν μκ°μ λ¨μ³λ²λ¦¬κ³ μμ±νλ±μ΄λ μ μ§μ μλ―Όμμμ κ°μΆ€μΌλ‘ μ μ§κ΅μ μν΄ κ΅κ°μ λ Έλ ₯λ νμνμ§λ§, 무μλ³΄λ€ κ°μΈμ΄ μ±μ°¨λ³μ λν κ΄λ μ μκ³ μμ±νλ±μ μν μ κ·Ήμ μΈ λ Έλ ₯μ΄ μꡬλμ΄μΌ νλ€. κ·Έλ‘ μΈν΄ κ΅κ°μ κ°μΈμ μν λ°λμ§ν μ¬νκ° νμ±λλ€λ κ²μ΄λ€." | |
    ]
)

demo.launch()  # pass share=True to launch() to create an externally accessible link