File size: 3,099 Bytes
6ade039
 
 
 
829f512
 
6ade039
 
 
 
 
 
 
 
 
 
 
 
 
 
bd5680b
 
6ade039
 
 
 
 
 
6d38642
 
0bbff27
6d38642
bd5680b
2393e13
 
 
 
 
 
 
bd5680b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6ade039
4ed057b
6ade039
da2515f
6ade039
bd5680b
6ade039
 
 
 
 
 
4ed057b
6ade039
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
# Install runtime dependencies at startup.
# BUGFIX: pip.main() is an unsupported internal API since pip 10 and can
# corrupt pip's in-process state; the documented way to install from a
# program is to run pip as a subprocess of the current interpreter.
import subprocess
import sys

for _package in ("torch", "transformers"):
    subprocess.check_call([sys.executable, "-m", "pip", "install", _package])

import re

import torch
import gradio as gr
import transformers
from transformers import AutoTokenizer, AutoModelForSequenceClassification

def load_model(model_name):
    """Load a sequence-classification checkpoint and its matching tokenizer.

    Args:
        model_name: Hugging Face Hub id (or local path) of the checkpoint.

    Returns:
        A ``(model, tokenizer)`` pair ready for inference.
    """
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForSequenceClassification.from_pretrained(model_name)
    return model, tokenizer


def inference(prompt_inputs):
    """Classify each sentence of *prompt_inputs* with the feedback-prize model.

    The text is split on sentence-ending punctuation (``.``, ``?``, ``!``),
    each sentence is classified independently, and the results are returned
    as one line per sentence in the form ``<sentence>\\t=>\\t<LABEL>``.

    Args:
        prompt_inputs: Raw user text; may contain newlines and tabs.

    Returns:
        A newline-joined string with one classification row per sentence.
    """
    model_name = "Unggi/feedback_prize_kor"

    # NOTE(review): the checkpoint is reloaded on every call; consider
    # caching the (model, tokenizer) pair at module level if latency matters.
    model, tokenizer = load_model(model_name=model_name)

    # Flatten whitespace so the sentence splitter sees a single-line string.
    prompt_inputs = prompt_inputs.replace('\n', ' ').replace('\t', ' ')

    # Split into sentences on end punctuation.
    # BUGFIX: strip each segment and drop empty/whitespace-only ones — the
    # old `prompt != ""` filter let segments like " " (produced after a
    # trailing "! ") through to the tokenizer, yielding spurious rows.
    prompt_list = [p.strip() for p in re.split(r'[.?!]', prompt_inputs)]
    prompt_list = [p for p in prompt_list if p]

    class_id_list = []
    for prompt in prompt_list:
        inputs = tokenizer(prompt, return_tensors="pt")

        # Inference only — no gradients needed.
        with torch.no_grad():
            logits = model(**inputs).logits

        predicted_class_id = logits.argmax().item()
        class_id_list.append(model.config.id2label[predicted_class_id])

    # One "<sentence>\t=>\t<LABEL>" row per classified sentence.
    rows = [
        p + '\t' + '=>' + '\t' + '<' + c_id + '>'
        for p, c_id in zip(prompt_list, class_id_list)
    ]
    return '\n'.join(rows)

# Build the Gradio UI and launch it exactly once.
# BUGFIX: the original called .launch() on the gr.Interface(...) expression
# AND then demo.launch() afterwards — so `demo` was bound to launch()'s
# return value (not the Interface) and the second call failed while also
# attempting a double launch. Bind the Interface first, then launch it.
demo = gr.Interface(
    fn=inference,
    inputs="text",
    outputs="text",  # the newline-joined string returned by inference()
    examples=[
        "μ˜ˆμ „λΆ€ν„° μ—¬μ„±μ˜ μ‚¬νšŒμ°Έμ—¬μš΄λ™μ΄λ‚˜ μ‹œμœ„λ₯Ό 지속적이며 힘 μžˆλŠ” ν–‰λ™μœΌλ‘œ ν™•λŒ€ν•΄λ‚˜κ°”μœΌλ©° ν˜„μž¬κΉŒμ§€ 계속 μ–‘μ„±ν‰λ“±μ˜ μ€‘μš”μ„±μ΄ μ£Όλͺ©λ°›κ³  μžˆλ‹€. μ•žμ—μ„œ λ§ν–ˆλ“―μ΄ 아이 μ΄λ¦„μ˜ 성을 μ•„λ²„μ§€μ˜ μ„±μœΌλ‘œ μ§“λ˜ 것을 이제 λΆ€λͺ¨ λͺ¨λ‘λ₯Ό λ°”νƒ•μœΌλ‘œ 이름을 μ§“λŠ” μ–‘κ³„μ œλ„λ₯Ό λ„μž…ν•˜κΈ°λ„ ν•˜κ³  직μž₯μ—μ„œμ˜ 남녀 λͺ¨λ‘ μŠΉμ§„κ³Ό μž„κΈˆ 등을 λ™λ“±ν•˜κ²Œ λΆ„λ°°ν•˜λ©° 남녀가 μ •ν•΄μ Έ 있던 직업듀이 μ„±μ°¨λ³„μ˜ 경계가 λ¬΄λ„ˆμ§κ³Ό λ™μ‹œμ— λˆ„κ΅¬λ‚˜ ν•  수 μžˆλ„λ‘ ν•˜λŠ” 것 이외에 λ§Žμ€ μ–‘μ„±ν‰λ“±μ˜ μ˜ˆλ“€μ΄ λ„λž˜λ˜κ³  μžˆλ‹€. μš”μ•½ν•΄μ„œ 성차별이 κ°œκ°œμΈμ„ κ΅¬μ†ν•˜κ³  μ§‘λ‹¨μ˜ 걸림돌이 λ˜μ–΄ μ‚¬λžŒλ§ˆλ‹€ μžμ‹ μ˜ μ£Όμž₯κ³Ό 행동에 λŒ€ν•œ μ œν•œμ΄ λΆˆκ°€ν”Όν•˜λ©° μ‚¬νšŒμ˜ λ°œμ „μ„ μ €ν•΄ν•˜λŠ” κ²ƒλΏλ§Œ μ•„λ‹ˆλΌ 선진ꡭ λ˜λŠ” λ°”λžŒμ§ν•œ κ΅­κ°€λ‘œ λ‚˜μ•„κ°€λŠ”λ° λ°©ν•΄κ°€ λœλ‹€λŠ” 것인데 이λ₯Ό κ·Ήλ³΅ν•˜κΈ° μœ„ν•΄μ„œλŠ” 성차별에 λŒ€ν•œ 생각을 떨쳐버리고 μ–‘μ„±ν‰λ“±μ΄λž€ 선진적 μ‹œλ―Όμ˜μ‹μ„ κ°–μΆ€μœΌλ‘œ 선진ꡭ을 μœ„ν•΄ κ΅­κ°€μ˜ λ…Έλ ₯도 ν•„μš”ν•˜μ§€λ§Œ, 무엇보닀 개인이 성차별에 λŒ€ν•œ 관념을 잊고 양성평등을 μœ„ν•œ 적극적인 λ…Έλ ₯이 μš”κ΅¬λ˜μ–΄μ•Ό ν•œλ‹€. 그둜 인해 ꡭ가와 κ°œμΈμ— μ˜ν•œ λ°”λžŒμ§ν•œ μ‚¬νšŒκ°€ ν˜•μ„±λœλ‹€λŠ” 것이닀."
    ],
)

# launch(share=True) would generate an externally reachable public link.
demo.launch()