import requests
import streamlit as st
from streamlit_lottie import st_lottie
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
import re
# Page Config
st.set_page_config(
    page_title="Song Lyric Acrostic (n행시)",
    page_icon="🎶",
    layout="wide"
)
### Model
tokenizer = AutoTokenizer.from_pretrained("wumusill/final_project_kogpt2")
@st.cache(show_spinner=False)
def load_model():
    model = AutoModelForCausalLM.from_pretrained("wumusill/final_project_kogpt2")
    return model
model = load_model()
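# Note: @st.cache keeps the loaded KoGPT-2 weights in memory across Streamlit reruns,
# so the model is fetched from the Hub once per session rather than on every interaction.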
# Class: wrapper so the same letter can be used more than once as a dict key in the output
class poem(object):
    def __init__(self, letter):
        self.letter = letter

    def __str__(self):
        return self.letter

    def __repr__(self):
        return "'" + self.letter + "'"
def n_line_poem(input_letter):
    # Dictionary for the Korean initial sound rule (두음법칙)
    dooeum = {"라":"나", "락":"낙", "란":"난", "랄":"날", "람":"남", "랍":"납", "랑":"낭",
              "래":"내", "랭":"냉", "냑":"약", "략":"약", "냥":"양", "량":"양", "녀":"여",
              "려":"여", "녁":"역", "력":"역", "년":"연", "련":"연", "녈":"열", "렬":"열",
              "념":"염", "렴":"염", "렵":"엽", "녕":"영", "령":"영", "녜":"예", "례":"예",
              "로":"노", "록":"녹", "론":"논", "롱":"농", "뢰":"뇌", "뇨":"요", "료":"요",
              "룡":"용", "루":"누", "뉴":"유", "류":"유", "뉵":"육", "륙":"육", "륜":"윤",
              "률":"율", "륭":"융", "륵":"늑", "름":"늠", "릉":"능", "니":"이", "리":"이",
              "린":"인", "림":"임", "립":"입"}
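    # Example of the rule: an input letter "림" is generated as "임" and "례" as "예",
    # matching how these syllables are written at the start of a word in standard Korean.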
    # List to hold the generated lines
    res_l = []

    # Iterate over the input one letter at a time, with its index
    for idx, val in enumerate(input_letter):
        # Apply the initial sound rule
        if val in dooeum.keys():
            val = dooeum[val]

        while True:
            # If idx is 0, this is the first letter
            if idx == 0:
                # Encode the first letter
                input_ids = tokenizer.encode(
                    val, add_special_tokens=False, return_tensors="pt")
                # print(f"encoding #{idx}: {input_ids}\n")  # 2-D tensor

                # Generate a sentence from the first letter's encoding
                output_sequence = model.generate(
                    input_ids=input_ids,
                    do_sample=True, max_length=42,
                    min_length=5, temperature=0.9, repetition_penalty=1.5,
                    no_repeat_ngram_size=2)[0]
                # print("generate result after encoding the first letter:", output_sequence, "\n")  # tensor
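                # Sampling choices above (as set in the original script): do_sample with
                # temperature=0.9 keeps the lines varied, while repetition_penalty=1.5 and
                # no_repeat_ngram_size=2 discourage the model from repeating itself.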
            # If it is not the first letter
            else:
                # Encode the current letter
                input_ids = tokenizer.encode(
                    val, add_special_tokens=False, return_tensors="pt")
                # print(f"encoding of letter #{idx}: {input_ids} \n")

                # Concatenate the previous encoding with the current one for a smoother acrostic
                link_with_pre_sentence = torch.cat((generated_sequence, input_ids[0]), 0)
                link_with_pre_sentence = torch.reshape(link_with_pre_sentence, (1, len(link_with_pre_sentence)))
                # print(f"tensor linked with the previous tensor: {link_with_pre_sentence} \n")

                # Generate a sentence from the combined encoding
                output_sequence = model.generate(
                    input_ids=link_with_pre_sentence,
                    do_sample=True, max_length=42,
                    min_length=5, temperature=0.9, repetition_penalty=1.5,
                    no_repeat_ngram_size=2)[0]
                # print(f"generate after encoding #{idx}: {output_sequence}")
            # Convert the generated sentence to a list (still token ids, possibly followed by padding)
            generated_sequence = output_sequence.tolist()
            # print(f"encoded list #{idx}: {generated_sequence} \n")

            # Remove padding by slicing up to the first padding index;
            # there may be no padding at all, hence the membership check
            if tokenizer.pad_token_id in generated_sequence:
                generated_sequence = generated_sequence[:generated_sequence.index(tokenizer.pad_token_id)]

            generated_sequence = torch.tensor(generated_sequence)
            # print(f"tensor #{idx} after padding removal: {generated_sequence} \n")

            # If this is not the first letter, drop the encoding of the previous sentence
            # so that only the newly generated line goes into the result list
            # print(generated_sequence)
            if idx != 0:
                # Slice past the length of the previous sentence to remove it
                generated_sequence = generated_sequence[len_sequence:]

            len_sequence = len(generated_sequence)
            # print("len_seq", len_sequence)

            # If the model only echoed the letter back, try again; otherwise leave the while loop
            if len_sequence > 1:
                break

        # Append the finished line to the result list
        res_l.append(generated_sequence)
    poem_dict = {}

    for letter, res in zip(input_letter, res_l):
        decode_res = tokenizer.decode(res, clean_up_tokenization_spaces=True, skip_special_tokens=True)
        poem_dict[poem(letter)] = decode_res

    return poem_dict
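# Usage sketch (hypothetical word; actual output varies because sampling is enabled):
#   n_line_poem("바다")  ->  {'바': '바다가 ...', '다': '다시 ...'}
# i.e. one generated lyric line per input letter, keyed by a poem() wrapper of that letter.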
###
# Image(.gif)
@st.cache(show_spinner=False)
def load_lottieurl(url: str):
    r = requests.get(url)
    if r.status_code != 200:
        return None
    return r.json()
lottie_url = "https://assets7.lottiefiles.com/private_files/lf30_fjln45y5.json"
lottie_json = load_lottieurl(lottie_url)
st_lottie(lottie_json, speed=1, height=200, key="initial")
# Title
row0_spacer1, row0_1, row0_spacer2, row0_2, row0_spacer3 = st.columns(
    (0.01, 2, 0.05, 0.5, 0.01)
)
with row0_1:
st.markdown("# ํ๊ธ ๋
ธ๋ ๊ฐ์ฌ nํ์โ")
st.markdown("### ๐ฆ๋ฉ์์ด์ฌ์์ฒ๋ผ AIS7๐ฆ - ํ์ด๋ ํ๋ก์ ํธ")
with row0_2:
st.write("")
st.write("")
st.write("")
st.subheader("1์กฐ - ํดํ๋ฆฌ")
    st.write("Team members")
st.write('---')
# Explanation
row1_spacer1, row1_1, row1_spacer2 = st.columns((0.01, 0.01, 0.01))
with row1_1:
st.markdown("### nํ์ ๊ฐ์ด๋๋ผ์ธ")
st.markdown("1. ํ๋จ์ ์๋ ํ
์คํธ๋ฐ์ 5์ ์ดํ ํ๊ธ ๋จ์ด๋ฅผ ๋ฃ์ด์ฃผ์ธ์")
st.markdown("2. 'nํ์ ์ ์ํ๊ธฐ' ๋ฒํผ์ ํด๋ฆญํด์ฃผ์ธ์")
st.write('---')
# Model & Input
row2_spacer1, row2_1, row2_spacer2 = st.columns((0.01, 0.01, 0.01))
# Word Input
with row2_1:
    word_input = st.text_input(
        "Type the Korean word for the acrostic, then press the button (max 5 characters)",
        placeholder='Enter a Korean word',
        max_chars=5
    )
    word_input = re.sub("[^가-힣]", "", word_input)
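    # For example, an input like "하늘2!" is reduced to "하늘" here: the regex keeps only
    # complete Hangul syllables in the range 가-힣. (Hypothetical input value.)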
    if st.button('Generate acrostic'):
        if word_input == "":
            st.error("Please enter a valid Korean word!")
        else:
            st.write("Acrostic word: ", word_input)

            with st.spinner('Please wait a moment...'):
                result = n_line_poem(word_input)
                st.success('Done!')
                for r in result:
                    st.write(f'{r} : {result[r]}')