dk-davidekim committed on
Commit
9873fd5
โ€ข
1 Parent(s): b1f7647

Upload 2 files

Browse files
Files changed (2) hide show
  1. .streamlit/config.toml +4 -0
  2. pages/beta.py +302 -0
.streamlit/config.toml ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ [theme]
2
+
3
+ base = "dark"
4
+ primaryColor="#87CEFA"
pages/beta.py ADDED
@@ -0,0 +1,302 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pandas as pd
2
+ import requests
3
+ import streamlit as st
4
+ from streamlit_lottie import st_lottie
5
+ import torch
6
+ from transformers import AutoTokenizer, AutoModelForCausalLM
7
+ import re
8
+
9
# Page Config — must be the first Streamlit call in the page script.
st.set_page_config(
    page_title="๋…ธ๋ž˜ ๊ฐ€์‚ฌ nํ–‰์‹œ Beta",
    page_icon="๐Ÿ’Œ",
    layout="wide"
)
# st.text(os.listdir(os.curdir))

### Model
# Tokenizer for the fine-tuned KoGPT2 checkpoint; loaded once at import time
# and shared by both poem generators below.
tokenizer = AutoTokenizer.from_pretrained("wumusill/final_project_kogpt2")
19
+
20
@st.cache(show_spinner=False)
def load_model():
    """Load and cache the fine-tuned KoGPT2 causal LM used by both poem modes."""
    return AutoModelForCausalLM.from_pretrained("wumusill/final_project_kogpt2")
24
+
25
# Cached by Streamlit (see load_model) so script reruns don't reload weights.
model = load_model()

# Ballad vocabulary used to seed poem lines: CP949-encoded CSV whose single
# column is literally named "0".
word = pd.read_csv("ballad_word.csv", encoding="cp949")
# st.dataframe(word)

# Sample one vocabulary word starting with the given syllable.
# NOTE(review): `one` is never read again in this file — confirm it is not
# dead leftover from debugging before removing.
one = word[word["0"].str.startswith("ํ•œ")].sample(1).values[0][0]
# st.header(type(one))
# st.header(one)
33
+
34
+
35
# Class: lets a dict hold "duplicate" letter keys for display.
class poem(object):
    """Identity-hashed wrapper around a single letter.

    Plain strings would collide as dict keys when the input word repeats a
    letter; each ``poem`` instance hashes by identity, so every letter of the
    acrostic gets its own entry while still printing as the bare letter.
    """

    def __init__(self, letter):
        self.letter = letter

    def __str__(self):
        return self.letter

    def __repr__(self):
        return "'{}'".format(self.letter)
45
+
46
+
47
def beta_poem(input_letter):
    """Generate an n-line acrostic ("n-haengsi") from *input_letter*.

    For each letter: apply the Korean initial-sound (dooeum) rule, pick a
    seed word from the ballad vocabulary starting with that letter, then let
    the KoGPT2 model continue the poem from everything generated so far.

    Returns a dict {"Type": "beta", poem(letter): line, ...}; poem() keys
    hash by identity so repeated letters stay distinct entries.
    """
    # Initial-sound-rule (dooeum) substitution table.
    dooeum = {"๋ผ":"๋‚˜", "๋ฝ":"๋‚™", "๋ž€":"๋‚œ", "๋ž„":"๋‚ ", "๋žŒ":"๋‚จ", "๋ž":"๋‚ฉ", "๋ž‘":"๋‚ญ",
              "๋ž˜":"๋‚ด", "๋žญ":"๋ƒ‰", "๋ƒ‘":"์•ฝ", "๋žต":"์•ฝ", "๋ƒฅ":"์–‘", "๋Ÿ‰":"์–‘", "๋…€":"์—ฌ",
              "๋ ค":"์—ฌ", "๋…":"์—ญ", "๋ ฅ":"์—ญ", "๋…„":"์—ฐ", "๋ จ":"์—ฐ", "๋…ˆ":"์—ด", "๋ ฌ":"์—ด",
              "๋…":"์—ผ", "๋ ด":"์—ผ", "๋ ต":"์—ฝ", "๋…•":"์˜", "๋ น":"์˜", "๋…œ":"์˜ˆ", "๋ก€":"์˜ˆ",
              "๋กœ":"๋…ธ", "๋ก":"๋…น", "๋ก ":"๋…ผ", "๋กฑ":"๋†", "๋ขฐ":"๋‡Œ", "๋‡จ":"์š”", "๋ฃŒ":"์š”",
              "๋ฃก":"์šฉ", "๋ฃจ":"๋ˆ„", "๋‰ด":"์œ ", "๋ฅ˜":"์œ ", "๋‰ต":"์œก", "๋ฅ™":"์œก", "๋ฅœ":"์œค",
              "๋ฅ ":"์œจ", "๋ฅญ":"์œต", "๋ฅต":"๋Š‘", "๋ฆ„":"๋Š ", "๋ฆ‰":"๋Šฅ", "๋‹ˆ":"์ด", "๋ฆฌ":"์ด",
              "๋ฆฐ":"์ธ", "๋ฆผ":"์ž„", "๋ฆฝ":"์ž…"}
    res_l = []          # one decoded line per input letter
    len_sequence = 0    # running token count of everything generated so far

    # Walk the letters with their index.
    for idx, val in enumerate(input_letter):
        # Apply the initial-sound rule.
        if val in dooeum:
            val = dooeum[val]

        # Seed the line with a vocabulary word starting with this letter.
        # BUG FIX: the original referenced an undefined name `words`, so this
        # lookup always raised NameError (swallowed by a bare `except:`) and
        # silently fell back to the bare letter. Query the module-level
        # `word` DataFrame (column "0") via a non-shadowing local instead.
        try:
            seed = word[word["0"].str.startswith(val)]["0"].sample(1).values[0]
        except (KeyError, ValueError):
            # No matching word (empty sample) — fall back to the letter itself.
            seed = val

        # Join previous lines with the current seed so generation flows; the
        # previous-sentence tokens are stripped back out of the output below.
        link_with_pre_sentence = (" ".join(res_l) + " " + seed + " " if idx != 0 else seed).strip()

        # Encode the combined prompt.
        input_ids = tokenizer.encode(link_with_pre_sentence, add_special_tokens=False, return_tensors="pt")

        # Generate a continuation from the encoded prompt.
        output_sequence = model.generate(
            input_ids=input_ids,
            do_sample=True,
            max_length=42,
            min_length=len_sequence + 2,
            temperature=0.9,
            repetition_penalty=1.5,
            no_repeat_ngram_size=2)

        # Token-id list; may carry trailing padding.
        generated_sequence = output_sequence.tolist()[0]

        # Trim padding if present; if the newly generated part is too short
        # (<4 tokens past the prompt), force roughly 8 tokens instead.
        if tokenizer.pad_token_id in generated_sequence:
            check_index = generated_sequence.index(tokenizer.pad_token_id)
            check_index = check_index if check_index - len_sequence > 3 else len_sequence + 8
            generated_sequence = generated_sequence[:check_index]

        # Drop the prompt: keep from the last occurrence of the seed word's
        # first token onward, so only this line's tokens remain.
        word_encode = tokenizer.encode(seed, add_special_tokens=False, return_tensors="pt").tolist()[0][0]
        split_index = len(generated_sequence) - 1 - generated_sequence[::-1].index(word_encode)
        generated_sequence = generated_sequence[split_index:]

        # Update the running length for the next letter (specials excluded).
        len_sequence += len([elem for elem in generated_sequence if elem not in tokenizer.all_special_ids])

        # Decode and store this line.
        decoded_sequence = tokenizer.decode(generated_sequence, clean_up_tokenization_spaces=True, skip_special_tokens=True)
        res_l.append(decoded_sequence)

    poem_dict = {"Type":"beta"}

    for letter, res in zip(input_letter, res_l):
        poem_dict[poem(letter)] = res

    return poem_dict
123
+
124
def alpha_poem(input_letter):
    """Generate an n-line acrostic by feeding each letter directly to KoGPT2.

    Unlike beta_poem, no vocabulary seed word is used: each (dooeum-adjusted)
    letter is encoded on its own, concatenated with the previous generation,
    and regenerated until the model produces more than one token.

    Returns a dict {"Type": "alpha", poem(letter): decoded line, ...}.
    """

    # Initial-sound-rule (dooeum) substitution table.
    dooeum = {"๋ผ":"๋‚˜", "๋ฝ":"๋‚™", "๋ž€":"๋‚œ", "๋ž„":"๋‚ ", "๋žŒ":"๋‚จ", "๋ž":"๋‚ฉ", "๋ž‘":"๋‚ญ",
              "๋ž˜":"๋‚ด", "๋žญ":"๋ƒ‰", "๋ƒ‘":"์•ฝ", "๋žต":"์•ฝ", "๋ƒฅ":"์–‘", "๋Ÿ‰":"์–‘", "๋…€":"์—ฌ",
              "๋ ค":"์—ฌ", "๋…":"์—ญ", "๋ ฅ":"์—ญ", "๋…„":"์—ฐ", "๋ จ":"์—ฐ", "๋…ˆ":"์—ด", "๋ ฌ":"์—ด",
              "๋…":"์—ผ", "๋ ด":"์—ผ", "๋ ต":"์—ฝ", "๋…•":"์˜", "๋ น":"์˜", "๋…œ":"์˜ˆ", "๋ก€":"์˜ˆ",
              "๋กœ":"๋…ธ", "๋ก":"๋…น", "๋ก ":"๋…ผ", "๋กฑ":"๋†", "๋ขฐ":"๋‡Œ", "๋‡จ":"์š”", "๋ฃŒ":"์š”",
              "๋ฃก":"์šฉ", "๋ฃจ":"๋ˆ„", "๋‰ด":"์œ ", "๋ฅ˜":"์œ ", "๋‰ต":"์œก", "๋ฅ™":"์œก", "๋ฅœ":"์œค",
              "๋ฅ ":"์œจ", "๋ฅญ":"์œต", "๋ฅต":"๋Š‘", "๋ฆ„":"๋Š ", "๋ฆ‰":"๋Šฅ", "๋‹ˆ":"์ด", "๋ฆฌ":"์ด",
              "๋ฆฐ":'์ธ', '๋ฆผ':'์ž„', '๋ฆฝ':'์ž…'}
    # One token tensor per input letter (decoded at the end).
    res_l = []

    # Walk the letters with their index.
    for idx, val in enumerate(input_letter):
        # Apply the initial-sound rule.
        if val in dooeum.keys():
            val = dooeum[val]


        while True:
            # idx == 0 means this is the first letter.
            if idx == 0:
                # Encode the first letter (2-D tensor).
                input_ids = tokenizer.encode(
                    val, add_special_tokens=False, return_tensors="pt")

                # Generate a line from the first letter's encoding.
                output_sequence = model.generate(
                    input_ids=input_ids,
                    do_sample=True,
                    max_length=42,
                    min_length=5,
                    temperature=0.9,
                    repetition_penalty=1.7,
                    no_repeat_ngram_size=2)[0]

            # Not the first letter.
            else:
                # Encode this single syllable.
                input_ids = tokenizer.encode(
                    val, add_special_tokens=False, return_tensors="pt")

                # Concatenate the previous generation with the current
                # letter's encoding so the poem flows, then reshape to a
                # (1, n) batch for generate().
                link_with_pre_sentence = torch.cat((generated_sequence, input_ids[0]), 0)
                link_with_pre_sentence = torch.reshape(link_with_pre_sentence, (1, len(link_with_pre_sentence)))

                # Generate from the combined encoding.
                output_sequence = model.generate(
                    input_ids=link_with_pre_sentence,
                    do_sample=True,
                    max_length=42,
                    min_length=5,
                    temperature=0.9,
                    repetition_penalty=1.7,
                    no_repeat_ngram_size=2)[0]

            # Token-id list; may carry trailing padding.
            generated_sequence = output_sequence.tolist()

            # Strip padding if present (slice up to the first pad token).
            if tokenizer.pad_token_id in generated_sequence:
                generated_sequence = generated_sequence[:generated_sequence.index(tokenizer.pad_token_id)]

            generated_sequence = torch.tensor(generated_sequence)

            # For later letters, drop the prompt tokens so only the newly
            # generated syllables go into the result.
            if idx != 0:
                # NOTE(review): on a retry, len_sequence holds the *failed
                # attempt's* length rather than the previous line's, so this
                # slice looks off in the retry path — confirm intended.
                generated_sequence = generated_sequence[len_sequence:]

            len_sequence = len(generated_sequence)

            # If the model only echoed the syllable, retry; else leave loop.
            if len_sequence > 1:
                break

        # Store this line's token tensor.
        res_l.append(generated_sequence)

    poem_dict = {"Type":"alpha"}

    for letter, res in zip(input_letter, res_l):
        decode_res = tokenizer.decode(res, clean_up_tokenization_spaces=True, skip_special_tokens=True)
        poem_dict[poem(letter)] = decode_res

    return poem_dict
221
+
222
# Image(.gif) — Lottie animation fetcher, cached by Streamlit.
@st.cache(show_spinner=False)
def load_lottieurl(url: str):
    """Fetch Lottie animation JSON from *url*; return None on a non-200 reply."""
    resp = requests.get(url)
    return resp.json() if resp.status_code == 200 else None
229
+
230
# NOTE(review): the Korean UI strings below appear mojibake'd in this copy
# (UTF-8 read under a wrong codec) — verify the deployed file is clean UTF-8.
lottie_url = "https://assets7.lottiefiles.com/private_files/lf30_fjln45y5.json"

lottie_json = load_lottieurl(lottie_url)
st_lottie(lottie_json, speed=1, height=200, key="initial")


# Title row: wide title column + narrow team-credit column with spacers.
row0_spacer1, row0_1, row0_spacer2, row0_2, row0_spacer3 = st.columns(
    (0.01, 2, 0.05, 0.5, 0.01)
)

with row0_1:
    st.markdown("# ํ•œ๊ธ€ ๋…ธ๋ž˜ ๊ฐ€์‚ฌ nํ–‰์‹œโœ")
    st.markdown("### ๐Ÿฆ๋ฉ‹์Ÿ์ด์‚ฌ์ž์ฒ˜๋Ÿผ AIS7๐Ÿฆ - ํŒŒ์ด๋„ ํ”„๋กœ์ ํŠธ")

with row0_2:
    # Blank writes push the credit text down to align with the title.
    st.write("")
    st.write("")
    st.write("")
    st.subheader("1์กฐ - ํ•ดํŒŒ๋ฆฌ")
    st.write("์ด์ง€ํ˜œ, ์ตœ์ง€์˜, ๊ถŒ์†Œํฌ, ๋ฌธ์ข…ํ˜„, ๊ตฌ์žํ˜„, ๊น€์˜์ค€")

st.write('---')

# Explanation / usage guidelines.
row1_spacer1, row1_1, row1_spacer2 = st.columns((0.01, 0.01, 0.01))

with row1_1:
    st.markdown("### nํ–‰์‹œ ๊ฐ€์ด๋“œ๋ผ์ธ")
    st.markdown("1. ํ•˜๋‹จ์— ์žˆ๋Š” ํ…์ŠคํŠธ๋ฐ”์— 5์ž ์ดํ•˜ ๋‹จ์–ด๋ฅผ ๋„ฃ์–ด์ฃผ์„ธ์š”")
    st.markdown("2. 'nํ–‰์‹œ ์ œ์ž‘ํ•˜๊ธฐ' ๋ฒ„ํŠผ์„ ํด๋ฆญํ•ด์ฃผ์„ธ์š”")

st.write('---')

# Model selection & word input.
row2_spacer1, row2_1, row2_spacer2= st.columns((0.01, 0.01, 0.01))

col1, col2 = st.columns(2)

# Word Input
with row2_1:

    with col1:
        # Choose which generator function to dispatch to.
        genre = st.radio(
            "nํ–‰์‹œ ํƒ€์ž… ์„ ํƒ",
            ('Alpha', 'Beta(test์ค‘)'))

        if genre == 'Alpha':
            n_line_poem = alpha_poem

        else:
            n_line_poem = beta_poem

    with col2:
        word_input = st.text_input(
            "nํ–‰์‹œ์— ์‚ฌ์šฉํ•  ๋‹จ์–ด๋ฅผ ์ ๊ณ  ๋ฒ„ํŠผ์„ ๋ˆŒ๋Ÿฌ์ฃผ์„ธ์š”.(์ตœ๋Œ€ 5์ž) ๐Ÿ‘‡",
            placeholder='ํ•œ๊ธ€ ๋‹จ์–ด๋ฅผ ์ž…๋ ฅํ•ด์ฃผ์„ธ์š”',
            max_chars=5
        )
        # Keep only complete Hangul syllables; anything else is stripped.
        word_input = re.sub("[^๊ฐ€-ํžฃ]", "", word_input)

        if st.button('nํ–‰์‹œ ์ œ์ž‘ํ•˜๊ธฐ'):
            # Empty after filtering means no valid Hangul was entered.
            if word_input == "":
                st.error("์˜จ์ „ํ•œ ํ•œ๊ธ€ ๋‹จ์–ด๋ฅผ ์‚ฌ์šฉํ•ด์ฃผ์„ธ์š”!")

            else:
                st.write("nํ–‰์‹œ ๋‹จ์–ด : ", word_input)
                with st.spinner('์ž ์‹œ ๊ธฐ๋‹ค๋ ค์ฃผ์„ธ์š”...'):
                    result = n_line_poem(word_input)
                st.success('์™„๋ฃŒ๋์Šต๋‹ˆ๋‹ค!')
                # First key is "Type"; the rest are poem(letter) -> line.
                for r in result:
                    st.write(f'{r} : {result[r]}')
302
+