Spaces:
openfree
/
Running on CPU Upgrade

seawolf2357 commited on
Commit
5313f78
ยท
verified ยท
1 Parent(s): 9395df5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +70 -150
app.py CHANGED
@@ -3,46 +3,38 @@ import requests
3
  import json
4
  import os
5
  from datetime import datetime, timedelta
6
- from huggingface_hub import InferenceClient # LLM ์‚ฌ์šฉ์„ ์œ„ํ•ด ํ•„์š”
7
-
8
- # ํ™˜๊ฒฝ ๋ณ€์ˆ˜์—์„œ API ํ‚ค ๊ฐ€์ ธ์˜ค๊ธฐ
9
- API_KEY = os.getenv("SERPHOUSE_API_KEY") # ๋ณธ์ธ์˜ SerpHouse API ํ‚ค๋ฅผ ํ™˜๊ฒฝ ๋ณ€์ˆ˜๋กœ ์„ค์ •ํ•˜์„ธ์š”.
10
- HF_TOKEN = os.getenv("HF_TOKEN") # Hugging Face API ํ† ํฐ์„ ํ™˜๊ฒฝ ๋ณ€์ˆ˜๋กœ ์„ค์ •ํ•˜์„ธ์š”.
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
 
12
- MAJOR_COUNTRIES = [
13
- "United States", "United Kingdom", "Canada", "Australia", "Germany",
14
- "France", "Japan", "South Korea", "China", "India",
15
- "Brazil", "Mexico", "Russia", "Italy", "Spain",
16
- "Netherlands", "Sweden", "Switzerland", "Norway", "Denmark",
17
- "Finland", "Belgium", "Austria", "New Zealand", "Ireland",
18
- "Singapore", "Hong Kong", "Israel", "United Arab Emirates", "Saudi Arabia",
19
- "South Africa", "Turkey", "Egypt", "Poland", "Czech Republic",
20
- "Hungary", "Greece", "Portugal", "Argentina", "Chile",
21
- "Colombia", "Peru", "Venezuela", "Thailand", "Malaysia",
22
- "Indonesia", "Philippines", "Vietnam", "Pakistan", "Bangladesh"
23
- ]
24
 
25
  def search_serphouse(query, country, page=1, num_result=100):
26
  url = "https://api.serphouse.com/serp/live"
27
-
28
- # ๊ตญ๊ฐ€๋ณ„ ๋„๋ฉ”์ธ ๋งคํ•‘
29
- country_domains = {
30
- "United States": "google.com",
31
- "United Kingdom": "google.co.uk",
32
- "Canada": "google.ca",
33
- "Australia": "google.com.au",
34
- "Germany": "google.de",
35
- "France": "google.fr",
36
- "Japan": "google.co.jp",
37
- "South Korea": "google.co.kr",
38
- "China": "google.com.hk", # ์ค‘๊ตญ์€ ํ™์ฝฉ ๋„๋ฉ”์ธ ์‚ฌ์šฉ
39
- "India": "google.co.in",
40
- # ํ•„์š”ํ•œ ๋‹ค๋ฅธ ๊ตญ๊ฐ€๋“ค์˜ ๋„๋ฉ”์ธ์„ ์ถ”๊ฐ€ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค.
41
- }
42
-
43
- # ์„ ํƒ๋œ ๊ตญ๊ฐ€์˜ ๋„๋ฉ”์ธ ๊ฐ€์ ธ์˜ค๊ธฐ (์—†์œผ๋ฉด ๊ธฐ๋ณธ๊ฐ’ google.com ์‚ฌ์šฉ)
44
- domain = country_domains.get(country, "google.com")
45
-
46
  now = datetime.utcnow()
47
  yesterday = now - timedelta(days=1)
48
  date_range = f"{yesterday.strftime('%Y-%m-%d')},{now.strftime('%Y-%m-%d')}"
@@ -50,8 +42,8 @@ def search_serphouse(query, country, page=1, num_result=100):
50
  payload = {
51
  "data": {
52
  "q": query,
53
- "domain": domain, # ๊ตญ๊ฐ€๋ณ„ ๋„๋ฉ”์ธ ์‚ฌ์šฉ
54
- "loc": country, # ๊ตญ๊ฐ€ ์ด๋ฆ„
55
  "lang": "en",
56
  "device": "desktop",
57
  "serp_type": "news",
@@ -73,87 +65,49 @@ def search_serphouse(query, country, page=1, num_result=100):
73
  response.raise_for_status()
74
  return response.json()
75
  except requests.RequestException as e:
76
- error_msg = f"Error: {str(e)}"
77
- if hasattr(response, 'text'):
78
- error_msg += f"\nResponse content: {response.text}"
79
- return {"error": error_msg}
80
 
81
  def format_results_from_raw(results):
82
- try:
83
- if isinstance(results, dict) and "error" in results:
84
- return "Error: " + results["error"], []
85
-
86
- if not isinstance(results, dict):
87
- raise ValueError("๊ฒฐ๊ณผ๊ฐ€ ์‚ฌ์ „ ํ˜•์‹์ด ์•„๋‹™๋‹ˆ๋‹ค.")
88
-
89
- # 'results' ํ‚ค ๋‚ด๋ถ€์˜ ๊ตฌ์กฐ ํ™•์ธ
90
- if 'results' in results:
91
- results_content = results['results']
92
- if 'results' in results_content:
93
- results_content = results_content['results']
94
- if 'news' in results_content:
95
- news_results = results_content['news']
96
- else:
97
- news_results = []
98
- else:
99
- news_results = []
100
- else:
101
- news_results = []
102
 
 
 
103
  if not news_results:
104
  return "๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.", []
105
 
106
  articles = []
107
-
108
  for idx, result in enumerate(news_results, 1):
109
- title = result.get("title", "์ œ๋ชฉ ์—†์Œ")
110
- link = result.get("url", result.get("link", "#"))
111
- snippet = result.get("snippet", "๋‚ด์šฉ ์—†์Œ")
112
- channel = result.get("channel", result.get("source", "์•Œ ์ˆ˜ ์—†์Œ"))
113
- time = result.get("time", result.get("date", "์•Œ ์ˆ˜ ์—†๋Š” ์‹œ๊ฐ„"))
114
- image_url = result.get("img", result.get("thumbnail", ""))
115
-
116
  articles.append({
117
  "index": idx,
118
- "title": title,
119
- "link": link,
120
- "snippet": snippet,
121
- "channel": channel,
122
- "time": time,
123
- "image_url": image_url
124
  })
125
-
126
  return "", articles
127
-
128
  except Exception as e:
129
- error_message = f"๊ฒฐ๊ณผ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
130
- return "Error: " + error_message, []
131
 
132
  def serphouse_search(query, country):
133
- page = 1
134
- num_result = 100 # num_result๋ฅผ 100์œผ๋กœ ์„ค์ •
135
- results = search_serphouse(query, country, page, num_result)
136
- error_message, articles = format_results_from_raw(results)
137
- return error_message, articles
138
 
139
- # LLM ์„ค์ •
140
  hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=HF_TOKEN)
141
 
142
  def summarize_article(title, snippet):
143
  try:
144
  prompt = f"๋‹ค์Œ ๋‰ด์Šค ์ œ๋ชฉ๊ณผ ์š”์•ฝ์„ ๋ฐ”ํƒ•์œผ๋กœ ํ•œ๊ตญ์–ด๋กœ 1๋ฌธ์žฅ์œผ๋กœ ๊ธ์ • ๋˜๋Š” ์ค‘๋ฆฝ ๋˜๋Š” ๋ถ€์ •์  ์„ฑ๊ฒฉ์˜ ๊ธฐ์‚ฌ์ธ์ง€ ํŒ๋‹จํ•˜๋ผ. ์ ˆ๋Œ€ ํ”„๋กฌํ”„ํŠธ ๋ฐ ์ง€์‹œ๋ฌธ ๋“ฑ์„ ๋…ธ์ถœํ•˜์ง€ ๋ง๊ณ  ์ค‘๋ณต์—†์ด ์˜ค๋กœ์ง€ 1๋ฌธ์žฅ์˜ ๊ฒฐ๊ณผ๊ฐ’๋งŒ ์ถœ๋ ฅํ•˜๋ผ.:\n์ œ๋ชฉ: {title}\n์š”์•ฝ: {snippet}"
145
- summary = hf_client.text_generation(prompt, max_new_tokens=500)
146
- return summary
147
  except Exception as e:
148
- return f"๋ถ„์„์„ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
149
 
150
  css = """
151
- footer {
152
- visibility: hidden;
153
- }
154
  """
155
 
156
- # Gradio ์ธํ„ฐํŽ˜์ด์Šค ๊ตฌ์„ฑ
157
  with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์„œ๋น„์Šค") as iface:
158
  gr.Markdown("๊ฒ€์ƒ‰์–ด๋ฅผ ์ž…๋ ฅํ•˜๊ณ  ์›ํ•˜๋Š” ๊ตญ๊ฐ€๋ฅผ ์„ ํƒํ•˜๋ฉด, ๊ฒ€์ƒ‰์–ด์™€ ์ผ์น˜ํ•˜๋Š” 24์‹œ๊ฐ„ ์ด๋‚ด ๋‰ด์Šค๋ฅผ ์ตœ๋Œ€ 100๊ฐœ ์ถœ๋ ฅํ•ฉ๋‹ˆ๋‹ค.")
159
 
@@ -163,15 +117,11 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์„œ๋น„์Šค") as
163
  country = gr.Dropdown(MAJOR_COUNTRIES, label="๊ตญ๊ฐ€", value="South Korea")
164
  search_button = gr.Button("๊ฒ€์ƒ‰")
165
 
166
- # ์ƒํƒœ ๋ฉ”์‹œ์ง€ ์ปดํฌ๋„ŒํŠธ ์ถ”๊ฐ€
167
  status_message = gr.Markdown(visible=False)
 
168
 
169
- # ๊ธฐ์‚ฌ ๋ฐ์ดํ„ฐ๋ฅผ ์ €์žฅํ•  ์ƒํƒœ ๋ณ€์ˆ˜
170
- articles_state = gr.State([]) # ์ดˆ๊ธฐ๊ฐ’์„ ๋นˆ ๋ฆฌ์ŠคํŠธ๋กœ ์„ค์ •
171
-
172
- # ์ตœ๋Œ€ 100๊ฐœ์˜ ๊ธฐ์‚ฌ์— ๋Œ€ํ•œ ์ปดํฌ๋„ŒํŠธ๋ฅผ ๋ฏธ๋ฆฌ ์ƒ์„ฑํ•ฉ๋‹ˆ๋‹ค.
173
  article_components = []
174
- for i in range(100): # 100๊ฐœ์˜ ์ปดํฌ๋„ŒํŠธ ์ƒ์„ฑ
175
  with gr.Group(visible=False) as article_group:
176
  title = gr.Markdown()
177
  image = gr.Image(width=200, height=150)
@@ -192,21 +142,15 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์„œ๋น„์Šค") as
192
  })
193
 
194
  def search_and_display(query, country, articles_state):
195
- # ์ƒํƒœ ๋ฉ”์‹œ์ง€ ํ‘œ์‹œ
196
- status_update = gr.update(value="์ฒ˜๋ฆฌ์ค‘์ž…๋‹ˆ๋‹ค. ์ž ์‹œ๋งŒ ๊ธฐ๋‹ค๋ฆฌ์„ธ์š”.", visible=True)
197
-
198
  error_message, articles = serphouse_search(query, country)
199
  outputs = []
 
200
  if error_message:
201
  outputs.append(gr.update(value=error_message, visible=True))
202
  for comp in article_components:
203
  outputs.extend([
204
- gr.update(visible=False), # group
205
- gr.update(), # title
206
- gr.update(), # image
207
- gr.update(), # snippet
208
- gr.update(), # info
209
- gr.update(visible=False), # summary_output
210
  ])
211
  articles_state = []
212
  else:
@@ -214,80 +158,56 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์„œ๋น„์Šค") as
214
  for idx, comp in enumerate(article_components):
215
  if idx < len(articles):
216
  article = articles[idx]
217
- # ์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ ์ˆ˜์ •
218
  image_url = article['image_url']
219
- if image_url and not image_url.startswith('data:image'):
220
- image_update = gr.update(value=image_url, visible=True)
221
- else:
222
- image_update = gr.update(value=None, visible=False)
223
 
224
  outputs.extend([
225
- gr.update(visible=True), # group
226
- gr.update(value=f"### [{article['title']}]({article['link']})"), # title
227
- image_update, # image
228
- gr.update(value=f"**์š”์•ฝ:** {article['snippet']}"), # snippet
229
- gr.update(value=f"**์ถœ์ฒ˜:** {article['channel']} | **์‹œ๊ฐ„:** {article['time']}"), # info
230
- gr.update(visible=False), # summary_output
231
  ])
232
  else:
233
  outputs.extend([
234
- gr.update(visible=False), # group
235
- gr.update(), # title
236
- gr.update(), # image
237
- gr.update(), # snippet
238
- gr.update(), # info
239
- gr.update(visible=False), # summary_output
240
  ])
241
- articles_state = articles # articles_state ์—…๋ฐ์ดํŠธ
 
242
  outputs.append(articles_state)
243
- # ์ƒํƒœ ๋ฉ”์‹œ์ง€ ์ˆจ๊น€
244
  outputs.append(gr.update(visible=False))
245
  return outputs
246
 
247
- # search_button ํด๋ฆญ ์‹œ ์—…๋ฐ์ดํŠธ๋  ์ถœ๋ ฅ ์ปดํฌ๋„ŒํŠธ ๋ชฉ๋ก ์ƒ์„ฑ
248
- search_outputs = []
249
- error_output = gr.Markdown(visible=False)
250
- search_outputs.append(error_output)
251
  for comp in article_components:
252
- search_outputs.append(comp['group'])
253
- search_outputs.append(comp['title'])
254
- search_outputs.append(comp['image'])
255
- search_outputs.append(comp['snippet'])
256
- search_outputs.append(comp['info'])
257
- search_outputs.append(comp['summary_output'])
258
- search_outputs.append(articles_state)
259
- search_outputs.append(status_message) # ์ƒํƒœ ๋ฉ”์‹œ์ง€ ์ถœ๋ ฅ์— ์ถ”๊ฐ€
260
 
261
  search_button.click(
262
  search_and_display,
263
  inputs=[query, country, articles_state],
264
  outputs=search_outputs,
265
- show_progress=False # Gradio์˜ ๊ธฐ๋ณธ ๋กœ๋”ฉ ํ‘œ์‹œ๋ฅผ ๋•๋‹ˆ๋‹ค.
266
  )
267
 
268
- # ๋ถ„์„ ๋ฒ„ํŠผ ํด๋ฆญ ์ด๋ฒคํŠธ ์„ค์ •
269
  for idx, comp in enumerate(article_components):
270
  def create_analyze_function(index=idx):
271
  def analyze_article(articles):
272
- # ์ƒํƒœ ๋ฉ”์‹œ์ง€ ํ‘œ์‹œ
273
- status_update = gr.update(value="์ฒ˜๋ฆฌ์ค‘์ž…๋‹ˆ๋‹ค. ์ž ์‹œ๋งŒ ๊ธฐ๋‹ค๋ฆฌ์„ธ์š”.", visible=True)
274
  if articles and index < len(articles):
275
  article = articles[index]
276
  summary = summarize_article(article['title'], article['snippet'])
277
- # ์ƒํƒœ ๋ฉ”์‹œ์ง€ ์ˆจ๊น€
278
  return gr.update(value=summary, visible=True), gr.update(visible=False)
279
- else:
280
- # ์ƒํƒœ ๋ฉ”์‹œ์ง€ ์ˆจ๊น€
281
- return gr.update(value="๊ธฐ์‚ฌ ์ •๋ณด๋ฅผ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.", visible=True), gr.update(visible=False)
282
  return analyze_article
283
 
284
- # ๋ถ„์„ ๋ฒ„ํŠผ์˜ ์ถœ๋ ฅ์— ์ƒํƒœ ๋ฉ”์‹œ์ง€๋ฅผ ์ถ”๊ฐ€ํ•ฉ๋‹ˆ๋‹ค.
285
  comp['analyze_button'].click(
286
  create_analyze_function(),
287
  inputs=[articles_state],
288
  outputs=[comp['summary_output'], status_message],
289
- show_progress=True # Gradio์˜ ๊ธฐ๋ณธ ๋กœ๋”ฉ ํ‘œ์‹œ๋ฅผ ๋•๋‹ˆ๋‹ค.
290
  )
291
 
292
- #iface.launch(auth=("gini", "pick"))
293
  iface.launch()
 
3
  import json
4
  import os
5
  from datetime import datetime, timedelta
6
+ from huggingface_hub import InferenceClient
7
+
8
+ API_KEY = os.getenv("SERPHOUSE_API_KEY")
9
+ HF_TOKEN = os.getenv("HF_TOKEN")
10
+
11
+ COUNTRY_DOMAINS = {
12
+ "United States": "google.com",
13
+ "United Kingdom": "google.co.uk",
14
+ "Canada": "google.ca",
15
+ "Australia": "google.com.au",
16
+ "Germany": "google.de",
17
+ "France": "google.fr",
18
+ "Japan": "google.co.jp",
19
+ "South Korea": "google.co.kr",
20
+ "China": "google.com.hk",
21
+ "India": "google.co.in",
22
+ "Brazil": "google.com.br",
23
+ "Mexico": "google.com.mx",
24
+ "Russia": "google.ru",
25
+ "Italy": "google.it",
26
+ "Spain": "google.es",
27
+ "Netherlands": "google.nl",
28
+ "Singapore": "google.com.sg",
29
+ "Hong Kong": "google.com.hk"
30
+ }
31
 
32
+ MAJOR_COUNTRIES = list(COUNTRY_DOMAINS.keys())
 
 
 
 
 
 
 
 
 
 
 
33
 
34
  def search_serphouse(query, country, page=1, num_result=100):
35
  url = "https://api.serphouse.com/serp/live"
36
+ domain = COUNTRY_DOMAINS.get(country, "google.com")
37
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38
  now = datetime.utcnow()
39
  yesterday = now - timedelta(days=1)
40
  date_range = f"{yesterday.strftime('%Y-%m-%d')},{now.strftime('%Y-%m-%d')}"
 
42
  payload = {
43
  "data": {
44
  "q": query,
45
+ "domain": domain,
46
+ "loc": country,
47
  "lang": "en",
48
  "device": "desktop",
49
  "serp_type": "news",
 
65
  response.raise_for_status()
66
  return response.json()
67
  except requests.RequestException as e:
68
+ return {"error": f"Error: {str(e)}"}
 
 
 
69
 
70
  def format_results_from_raw(results):
71
+ if isinstance(results, dict) and "error" in results:
72
+ return "Error: " + results["error"], []
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
73
 
74
+ try:
75
+ news_results = results.get('results', {}).get('results', {}).get('news', [])
76
  if not news_results:
77
  return "๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.", []
78
 
79
  articles = []
 
80
  for idx, result in enumerate(news_results, 1):
 
 
 
 
 
 
 
81
  articles.append({
82
  "index": idx,
83
+ "title": result.get("title", "์ œ๋ชฉ ์—†์Œ"),
84
+ "link": result.get("url", result.get("link", "#")),
85
+ "snippet": result.get("snippet", "๋‚ด์šฉ ์—†์Œ"),
86
+ "channel": result.get("channel", result.get("source", "์•Œ ์ˆ˜ ์—†์Œ")),
87
+ "time": result.get("time", result.get("date", "์•Œ ์ˆ˜ ์—†๋Š” ์‹œ๊ฐ„")),
88
+ "image_url": result.get("img", result.get("thumbnail", ""))
89
  })
 
90
  return "", articles
 
91
  except Exception as e:
92
+ return f"๊ฒฐ๊ณผ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}", []
 
93
 
94
  def serphouse_search(query, country):
95
+ results = search_serphouse(query, country)
96
+ return format_results_from_raw(results)
 
 
 
97
 
 
98
  hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=HF_TOKEN)
99
 
100
  def summarize_article(title, snippet):
101
  try:
102
  prompt = f"๋‹ค์Œ ๋‰ด์Šค ์ œ๋ชฉ๊ณผ ์š”์•ฝ์„ ๋ฐ”ํƒ•์œผ๋กœ ํ•œ๊ตญ์–ด๋กœ 1๋ฌธ์žฅ์œผ๋กœ ๊ธ์ • ๋˜๋Š” ์ค‘๋ฆฝ ๋˜๋Š” ๋ถ€์ •์  ์„ฑ๊ฒฉ์˜ ๊ธฐ์‚ฌ์ธ์ง€ ํŒ๋‹จํ•˜๋ผ. ์ ˆ๋Œ€ ํ”„๋กฌํ”„ํŠธ ๋ฐ ์ง€์‹œ๋ฌธ ๋“ฑ์„ ๋…ธ์ถœํ•˜์ง€ ๋ง๊ณ  ์ค‘๋ณต์—†์ด ์˜ค๋กœ์ง€ 1๋ฌธ์žฅ์˜ ๊ฒฐ๊ณผ๊ฐ’๋งŒ ์ถœ๋ ฅํ•˜๋ผ.:\n์ œ๋ชฉ: {title}\n์š”์•ฝ: {snippet}"
103
+ return hf_client.text_generation(prompt, max_new_tokens=500)
 
104
  except Exception as e:
105
+ return f"๋ถ„์„ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
106
 
107
  css = """
108
+ footer {visibility: hidden;}
 
 
109
  """
110
 
 
111
  with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์„œ๋น„์Šค") as iface:
112
  gr.Markdown("๊ฒ€์ƒ‰์–ด๋ฅผ ์ž…๋ ฅํ•˜๊ณ  ์›ํ•˜๋Š” ๊ตญ๊ฐ€๋ฅผ ์„ ํƒํ•˜๋ฉด, ๊ฒ€์ƒ‰์–ด์™€ ์ผ์น˜ํ•˜๋Š” 24์‹œ๊ฐ„ ์ด๋‚ด ๋‰ด์Šค๋ฅผ ์ตœ๋Œ€ 100๊ฐœ ์ถœ๋ ฅํ•ฉ๋‹ˆ๋‹ค.")
113
 
 
117
  country = gr.Dropdown(MAJOR_COUNTRIES, label="๊ตญ๊ฐ€", value="South Korea")
118
  search_button = gr.Button("๊ฒ€์ƒ‰")
119
 
 
120
  status_message = gr.Markdown(visible=False)
121
+ articles_state = gr.State([])
122
 
 
 
 
 
123
  article_components = []
124
+ for i in range(100):
125
  with gr.Group(visible=False) as article_group:
126
  title = gr.Markdown()
127
  image = gr.Image(width=200, height=150)
 
142
  })
143
 
144
  def search_and_display(query, country, articles_state):
 
 
 
145
  error_message, articles = serphouse_search(query, country)
146
  outputs = []
147
+
148
  if error_message:
149
  outputs.append(gr.update(value=error_message, visible=True))
150
  for comp in article_components:
151
  outputs.extend([
152
+ gr.update(visible=False), gr.update(), gr.update(),
153
+ gr.update(), gr.update(), gr.update(visible=False),
 
 
 
 
154
  ])
155
  articles_state = []
156
  else:
 
158
  for idx, comp in enumerate(article_components):
159
  if idx < len(articles):
160
  article = articles[idx]
 
161
  image_url = article['image_url']
162
+ image_update = gr.update(value=image_url, visible=True) if image_url and not image_url.startswith('data:image') else gr.update(value=None, visible=False)
 
 
 
163
 
164
  outputs.extend([
165
+ gr.update(visible=True),
166
+ gr.update(value=f"### [{article['title']}]({article['link']})"),
167
+ image_update,
168
+ gr.update(value=f"**์š”์•ฝ:** {article['snippet']}"),
169
+ gr.update(value=f"**์ถœ์ฒ˜:** {article['channel']} | **์‹œ๊ฐ„:** {article['time']}"),
170
+ gr.update(visible=False),
171
  ])
172
  else:
173
  outputs.extend([
174
+ gr.update(visible=False), gr.update(), gr.update(),
175
+ gr.update(), gr.update(), gr.update(visible=False),
 
 
 
 
176
  ])
177
+ articles_state = articles
178
+
179
  outputs.append(articles_state)
 
180
  outputs.append(gr.update(visible=False))
181
  return outputs
182
 
183
+ search_outputs = [gr.Markdown(visible=False)]
 
 
 
184
  for comp in article_components:
185
+ search_outputs.extend([comp['group'], comp['title'], comp['image'],
186
+ comp['snippet'], comp['info'], comp['summary_output']])
187
+ search_outputs.extend([articles_state, status_message])
 
 
 
 
 
188
 
189
  search_button.click(
190
  search_and_display,
191
  inputs=[query, country, articles_state],
192
  outputs=search_outputs,
193
+ show_progress=False
194
  )
195
 
 
196
  for idx, comp in enumerate(article_components):
197
  def create_analyze_function(index=idx):
198
  def analyze_article(articles):
 
 
199
  if articles and index < len(articles):
200
  article = articles[index]
201
  summary = summarize_article(article['title'], article['snippet'])
 
202
  return gr.update(value=summary, visible=True), gr.update(visible=False)
203
+ return gr.update(value="๊ธฐ์‚ฌ ์ •๋ณด๋ฅผ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.", visible=True), gr.update(visible=False)
 
 
204
  return analyze_article
205
 
 
206
  comp['analyze_button'].click(
207
  create_analyze_function(),
208
  inputs=[articles_state],
209
  outputs=[comp['summary_output'], status_message],
210
+ show_progress=True
211
  )
212
 
 
213
  iface.launch()