Spaces:
Running
on
CPU Upgrade
Running
on
CPU Upgrade
seawolf2357
commited on
Update app.py
Browse files
app.py
CHANGED
@@ -3,46 +3,38 @@ import requests
|
|
3 |
import json
|
4 |
import os
|
5 |
from datetime import datetime, timedelta
|
6 |
-
from huggingface_hub import InferenceClient
|
7 |
-
|
8 |
-
|
9 |
-
|
10 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
11 |
|
12 |
-
MAJOR_COUNTRIES =
|
13 |
-
"United States", "United Kingdom", "Canada", "Australia", "Germany",
|
14 |
-
"France", "Japan", "South Korea", "China", "India",
|
15 |
-
"Brazil", "Mexico", "Russia", "Italy", "Spain",
|
16 |
-
"Netherlands", "Sweden", "Switzerland", "Norway", "Denmark",
|
17 |
-
"Finland", "Belgium", "Austria", "New Zealand", "Ireland",
|
18 |
-
"Singapore", "Hong Kong", "Israel", "United Arab Emirates", "Saudi Arabia",
|
19 |
-
"South Africa", "Turkey", "Egypt", "Poland", "Czech Republic",
|
20 |
-
"Hungary", "Greece", "Portugal", "Argentina", "Chile",
|
21 |
-
"Colombia", "Peru", "Venezuela", "Thailand", "Malaysia",
|
22 |
-
"Indonesia", "Philippines", "Vietnam", "Pakistan", "Bangladesh"
|
23 |
-
]
|
24 |
|
25 |
def search_serphouse(query, country, page=1, num_result=100):
|
26 |
url = "https://api.serphouse.com/serp/live"
|
27 |
-
|
28 |
-
|
29 |
-
country_domains = {
|
30 |
-
"United States": "google.com",
|
31 |
-
"United Kingdom": "google.co.uk",
|
32 |
-
"Canada": "google.ca",
|
33 |
-
"Australia": "google.com.au",
|
34 |
-
"Germany": "google.de",
|
35 |
-
"France": "google.fr",
|
36 |
-
"Japan": "google.co.jp",
|
37 |
-
"South Korea": "google.co.kr",
|
38 |
-
"China": "google.com.hk", # ์ค๊ตญ์ ํ์ฝฉ ๋๋ฉ์ธ ์ฌ์ฉ
|
39 |
-
"India": "google.co.in",
|
40 |
-
# ํ์ํ ๋ค๋ฅธ ๊ตญ๊ฐ๋ค์ ๋๋ฉ์ธ์ ์ถ๊ฐํ ์ ์์ต๋๋ค.
|
41 |
-
}
|
42 |
-
|
43 |
-
# ์ ํ๋ ๊ตญ๊ฐ์ ๋๋ฉ์ธ ๊ฐ์ ธ์ค๊ธฐ (์์ผ๋ฉด ๊ธฐ๋ณธ๊ฐ google.com ์ฌ์ฉ)
|
44 |
-
domain = country_domains.get(country, "google.com")
|
45 |
-
|
46 |
now = datetime.utcnow()
|
47 |
yesterday = now - timedelta(days=1)
|
48 |
date_range = f"{yesterday.strftime('%Y-%m-%d')},{now.strftime('%Y-%m-%d')}"
|
@@ -50,8 +42,8 @@ def search_serphouse(query, country, page=1, num_result=100):
|
|
50 |
payload = {
|
51 |
"data": {
|
52 |
"q": query,
|
53 |
-
"domain": domain,
|
54 |
-
"loc": country,
|
55 |
"lang": "en",
|
56 |
"device": "desktop",
|
57 |
"serp_type": "news",
|
@@ -73,87 +65,49 @@ def search_serphouse(query, country, page=1, num_result=100):
|
|
73 |
response.raise_for_status()
|
74 |
return response.json()
|
75 |
except requests.RequestException as e:
|
76 |
-
|
77 |
-
if hasattr(response, 'text'):
|
78 |
-
error_msg += f"\nResponse content: {response.text}"
|
79 |
-
return {"error": error_msg}
|
80 |
|
81 |
def format_results_from_raw(results):
|
82 |
-
|
83 |
-
|
84 |
-
return "Error: " + results["error"], []
|
85 |
-
|
86 |
-
if not isinstance(results, dict):
|
87 |
-
raise ValueError("๊ฒฐ๊ณผ๊ฐ ์ฌ์ ํ์์ด ์๋๋๋ค.")
|
88 |
-
|
89 |
-
# 'results' ํค ๋ด๋ถ์ ๊ตฌ์กฐ ํ์ธ
|
90 |
-
if 'results' in results:
|
91 |
-
results_content = results['results']
|
92 |
-
if 'results' in results_content:
|
93 |
-
results_content = results_content['results']
|
94 |
-
if 'news' in results_content:
|
95 |
-
news_results = results_content['news']
|
96 |
-
else:
|
97 |
-
news_results = []
|
98 |
-
else:
|
99 |
-
news_results = []
|
100 |
-
else:
|
101 |
-
news_results = []
|
102 |
|
|
|
|
|
103 |
if not news_results:
|
104 |
return "๊ฒ์ ๊ฒฐ๊ณผ๊ฐ ์์ต๋๋ค.", []
|
105 |
|
106 |
articles = []
|
107 |
-
|
108 |
for idx, result in enumerate(news_results, 1):
|
109 |
-
title = result.get("title", "์ ๋ชฉ ์์")
|
110 |
-
link = result.get("url", result.get("link", "#"))
|
111 |
-
snippet = result.get("snippet", "๋ด์ฉ ์์")
|
112 |
-
channel = result.get("channel", result.get("source", "์ ์ ์์"))
|
113 |
-
time = result.get("time", result.get("date", "์ ์ ์๋ ์๊ฐ"))
|
114 |
-
image_url = result.get("img", result.get("thumbnail", ""))
|
115 |
-
|
116 |
articles.append({
|
117 |
"index": idx,
|
118 |
-
"title": title,
|
119 |
-
"link": link,
|
120 |
-
"snippet": snippet,
|
121 |
-
"channel": channel,
|
122 |
-
"time": time,
|
123 |
-
"image_url":
|
124 |
})
|
125 |
-
|
126 |
return "", articles
|
127 |
-
|
128 |
except Exception as e:
|
129 |
-
|
130 |
-
return "Error: " + error_message, []
|
131 |
|
132 |
def serphouse_search(query, country):
|
133 |
-
|
134 |
-
|
135 |
-
results = search_serphouse(query, country, page, num_result)
|
136 |
-
error_message, articles = format_results_from_raw(results)
|
137 |
-
return error_message, articles
|
138 |
|
139 |
-
# LLM ์ค์
|
140 |
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=HF_TOKEN)
|
141 |
|
142 |
def summarize_article(title, snippet):
|
143 |
try:
|
144 |
prompt = f"๋ค์ ๋ด์ค ์ ๋ชฉ๊ณผ ์์ฝ์ ๋ฐํ์ผ๋ก ํ๊ตญ์ด๋ก 1๋ฌธ์ฅ์ผ๋ก ๊ธ์ ๋๋ ์ค๋ฆฝ ๋๋ ๋ถ์ ์ ์ฑ๊ฒฉ์ ๊ธฐ์ฌ์ธ์ง ํ๋จํ๋ผ. ์ ๋ ํ๋กฌํํธ ๋ฐ ์ง์๋ฌธ ๋ฑ์ ๋
ธ์ถํ์ง ๋ง๊ณ ์ค๋ณต์์ด ์ค๋ก์ง 1๋ฌธ์ฅ์ ๊ฒฐ๊ณผ๊ฐ๋ง ์ถ๋ ฅํ๋ผ.:\n์ ๋ชฉ: {title}\n์์ฝ: {snippet}"
|
145 |
-
|
146 |
-
return summary
|
147 |
except Exception as e:
|
148 |
-
return f"
|
149 |
|
150 |
css = """
|
151 |
-
footer {
|
152 |
-
visibility: hidden;
|
153 |
-
}
|
154 |
"""
|
155 |
|
156 |
-
# Gradio ์ธํฐํ์ด์ค ๊ตฌ์ฑ
|
157 |
with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์๋น์ค") as iface:
|
158 |
gr.Markdown("๊ฒ์์ด๋ฅผ ์
๋ ฅํ๊ณ ์ํ๋ ๊ตญ๊ฐ๋ฅผ ์ ํํ๋ฉด, ๊ฒ์์ด์ ์ผ์นํ๋ 24์๊ฐ ์ด๋ด ๋ด์ค๋ฅผ ์ต๋ 100๊ฐ ์ถ๋ ฅํฉ๋๋ค.")
|
159 |
|
@@ -163,15 +117,11 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์๋น์ค") as
|
|
163 |
country = gr.Dropdown(MAJOR_COUNTRIES, label="๊ตญ๊ฐ", value="South Korea")
|
164 |
search_button = gr.Button("๊ฒ์")
|
165 |
|
166 |
-
# ์ํ ๋ฉ์์ง ์ปดํฌ๋ํธ ์ถ๊ฐ
|
167 |
status_message = gr.Markdown(visible=False)
|
|
|
168 |
|
169 |
-
# ๊ธฐ์ฌ ๋ฐ์ดํฐ๋ฅผ ์ ์ฅํ ์ํ ๋ณ์
|
170 |
-
articles_state = gr.State([]) # ์ด๊ธฐ๊ฐ์ ๋น ๋ฆฌ์คํธ๋ก ์ค์
|
171 |
-
|
172 |
-
# ์ต๋ 100๊ฐ์ ๊ธฐ์ฌ์ ๋ํ ์ปดํฌ๋ํธ๋ฅผ ๋ฏธ๋ฆฌ ์์ฑํฉ๋๋ค.
|
173 |
article_components = []
|
174 |
-
for i in range(100):
|
175 |
with gr.Group(visible=False) as article_group:
|
176 |
title = gr.Markdown()
|
177 |
image = gr.Image(width=200, height=150)
|
@@ -192,21 +142,15 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์๋น์ค") as
|
|
192 |
})
|
193 |
|
194 |
def search_and_display(query, country, articles_state):
|
195 |
-
# ์ํ ๋ฉ์์ง ํ์
|
196 |
-
status_update = gr.update(value="์ฒ๋ฆฌ์ค์
๋๋ค. ์ ์๋ง ๊ธฐ๋ค๋ฆฌ์ธ์.", visible=True)
|
197 |
-
|
198 |
error_message, articles = serphouse_search(query, country)
|
199 |
outputs = []
|
|
|
200 |
if error_message:
|
201 |
outputs.append(gr.update(value=error_message, visible=True))
|
202 |
for comp in article_components:
|
203 |
outputs.extend([
|
204 |
-
gr.update(visible=False),
|
205 |
-
gr.update(),
|
206 |
-
gr.update(), # image
|
207 |
-
gr.update(), # snippet
|
208 |
-
gr.update(), # info
|
209 |
-
gr.update(visible=False), # summary_output
|
210 |
])
|
211 |
articles_state = []
|
212 |
else:
|
@@ -214,80 +158,56 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์๋น์ค") as
|
|
214 |
for idx, comp in enumerate(article_components):
|
215 |
if idx < len(articles):
|
216 |
article = articles[idx]
|
217 |
-
# ์ด๋ฏธ์ง ์ฒ๋ฆฌ ์์
|
218 |
image_url = article['image_url']
|
219 |
-
if image_url and not image_url.startswith('data:image')
|
220 |
-
image_update = gr.update(value=image_url, visible=True)
|
221 |
-
else:
|
222 |
-
image_update = gr.update(value=None, visible=False)
|
223 |
|
224 |
outputs.extend([
|
225 |
-
gr.update(visible=True),
|
226 |
-
gr.update(value=f"### [{article['title']}]({article['link']})"),
|
227 |
-
image_update,
|
228 |
-
gr.update(value=f"**์์ฝ:** {article['snippet']}"),
|
229 |
-
gr.update(value=f"**์ถ์ฒ:** {article['channel']} | **์๊ฐ:** {article['time']}"),
|
230 |
-
gr.update(visible=False),
|
231 |
])
|
232 |
else:
|
233 |
outputs.extend([
|
234 |
-
gr.update(visible=False),
|
235 |
-
gr.update(),
|
236 |
-
gr.update(), # image
|
237 |
-
gr.update(), # snippet
|
238 |
-
gr.update(), # info
|
239 |
-
gr.update(visible=False), # summary_output
|
240 |
])
|
241 |
-
articles_state = articles
|
|
|
242 |
outputs.append(articles_state)
|
243 |
-
# ์ํ ๋ฉ์์ง ์จ๊น
|
244 |
outputs.append(gr.update(visible=False))
|
245 |
return outputs
|
246 |
|
247 |
-
|
248 |
-
search_outputs = []
|
249 |
-
error_output = gr.Markdown(visible=False)
|
250 |
-
search_outputs.append(error_output)
|
251 |
for comp in article_components:
|
252 |
-
search_outputs.
|
253 |
-
|
254 |
-
|
255 |
-
search_outputs.append(comp['snippet'])
|
256 |
-
search_outputs.append(comp['info'])
|
257 |
-
search_outputs.append(comp['summary_output'])
|
258 |
-
search_outputs.append(articles_state)
|
259 |
-
search_outputs.append(status_message) # ์ํ ๋ฉ์์ง ์ถ๋ ฅ์ ์ถ๊ฐ
|
260 |
|
261 |
search_button.click(
|
262 |
search_and_display,
|
263 |
inputs=[query, country, articles_state],
|
264 |
outputs=search_outputs,
|
265 |
-
show_progress=False
|
266 |
)
|
267 |
|
268 |
-
# ๋ถ์ ๋ฒํผ ํด๋ฆญ ์ด๋ฒคํธ ์ค์
|
269 |
for idx, comp in enumerate(article_components):
|
270 |
def create_analyze_function(index=idx):
|
271 |
def analyze_article(articles):
|
272 |
-
# ์ํ ๋ฉ์์ง ํ์
|
273 |
-
status_update = gr.update(value="์ฒ๋ฆฌ์ค์
๋๋ค. ์ ์๋ง ๊ธฐ๋ค๋ฆฌ์ธ์.", visible=True)
|
274 |
if articles and index < len(articles):
|
275 |
article = articles[index]
|
276 |
summary = summarize_article(article['title'], article['snippet'])
|
277 |
-
# ์ํ ๋ฉ์์ง ์จ๊น
|
278 |
return gr.update(value=summary, visible=True), gr.update(visible=False)
|
279 |
-
|
280 |
-
# ์ํ ๋ฉ์์ง ์จ๊น
|
281 |
-
return gr.update(value="๊ธฐ์ฌ ์ ๋ณด๋ฅผ ์ฐพ์ ์ ์์ต๋๋ค.", visible=True), gr.update(visible=False)
|
282 |
return analyze_article
|
283 |
|
284 |
-
# ๋ถ์ ๋ฒํผ์ ์ถ๋ ฅ์ ์ํ ๋ฉ์์ง๋ฅผ ์ถ๊ฐํฉ๋๋ค.
|
285 |
comp['analyze_button'].click(
|
286 |
create_analyze_function(),
|
287 |
inputs=[articles_state],
|
288 |
outputs=[comp['summary_output'], status_message],
|
289 |
-
show_progress=True
|
290 |
)
|
291 |
|
292 |
-
#iface.launch(auth=("gini", "pick"))
|
293 |
iface.launch()
|
|
|
3 |
import json
|
4 |
import os
|
5 |
from datetime import datetime, timedelta
|
6 |
+
from huggingface_hub import InferenceClient
|
7 |
+
|
8 |
+
API_KEY = os.getenv("SERPHOUSE_API_KEY")
|
9 |
+
HF_TOKEN = os.getenv("HF_TOKEN")
|
10 |
+
|
11 |
+
COUNTRY_DOMAINS = {
|
12 |
+
"United States": "google.com",
|
13 |
+
"United Kingdom": "google.co.uk",
|
14 |
+
"Canada": "google.ca",
|
15 |
+
"Australia": "google.com.au",
|
16 |
+
"Germany": "google.de",
|
17 |
+
"France": "google.fr",
|
18 |
+
"Japan": "google.co.jp",
|
19 |
+
"South Korea": "google.co.kr",
|
20 |
+
"China": "google.com.hk",
|
21 |
+
"India": "google.co.in",
|
22 |
+
"Brazil": "google.com.br",
|
23 |
+
"Mexico": "google.com.mx",
|
24 |
+
"Russia": "google.ru",
|
25 |
+
"Italy": "google.it",
|
26 |
+
"Spain": "google.es",
|
27 |
+
"Netherlands": "google.nl",
|
28 |
+
"Singapore": "google.com.sg",
|
29 |
+
"Hong Kong": "google.com.hk"
|
30 |
+
}
|
31 |
|
32 |
+
MAJOR_COUNTRIES = list(COUNTRY_DOMAINS.keys())
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
33 |
|
34 |
def search_serphouse(query, country, page=1, num_result=100):
|
35 |
url = "https://api.serphouse.com/serp/live"
|
36 |
+
domain = COUNTRY_DOMAINS.get(country, "google.com")
|
37 |
+
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
38 |
now = datetime.utcnow()
|
39 |
yesterday = now - timedelta(days=1)
|
40 |
date_range = f"{yesterday.strftime('%Y-%m-%d')},{now.strftime('%Y-%m-%d')}"
|
|
|
42 |
payload = {
|
43 |
"data": {
|
44 |
"q": query,
|
45 |
+
"domain": domain,
|
46 |
+
"loc": country,
|
47 |
"lang": "en",
|
48 |
"device": "desktop",
|
49 |
"serp_type": "news",
|
|
|
65 |
response.raise_for_status()
|
66 |
return response.json()
|
67 |
except requests.RequestException as e:
|
68 |
+
return {"error": f"Error: {str(e)}"}
|
|
|
|
|
|
|
69 |
|
70 |
def format_results_from_raw(results):
|
71 |
+
if isinstance(results, dict) and "error" in results:
|
72 |
+
return "Error: " + results["error"], []
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
73 |
|
74 |
+
try:
|
75 |
+
news_results = results.get('results', {}).get('results', {}).get('news', [])
|
76 |
if not news_results:
|
77 |
return "๊ฒ์ ๊ฒฐ๊ณผ๊ฐ ์์ต๋๋ค.", []
|
78 |
|
79 |
articles = []
|
|
|
80 |
for idx, result in enumerate(news_results, 1):
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
81 |
articles.append({
|
82 |
"index": idx,
|
83 |
+
"title": result.get("title", "์ ๋ชฉ ์์"),
|
84 |
+
"link": result.get("url", result.get("link", "#")),
|
85 |
+
"snippet": result.get("snippet", "๋ด์ฉ ์์"),
|
86 |
+
"channel": result.get("channel", result.get("source", "์ ์ ์์")),
|
87 |
+
"time": result.get("time", result.get("date", "์ ์ ์๋ ์๊ฐ")),
|
88 |
+
"image_url": result.get("img", result.get("thumbnail", ""))
|
89 |
})
|
|
|
90 |
return "", articles
|
|
|
91 |
except Exception as e:
|
92 |
+
return f"๊ฒฐ๊ณผ ์ฒ๋ฆฌ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}", []
|
|
|
93 |
|
94 |
def serphouse_search(query, country):
|
95 |
+
results = search_serphouse(query, country)
|
96 |
+
return format_results_from_raw(results)
|
|
|
|
|
|
|
97 |
|
|
|
98 |
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=HF_TOKEN)
|
99 |
|
100 |
def summarize_article(title, snippet):
|
101 |
try:
|
102 |
prompt = f"๋ค์ ๋ด์ค ์ ๋ชฉ๊ณผ ์์ฝ์ ๋ฐํ์ผ๋ก ํ๊ตญ์ด๋ก 1๋ฌธ์ฅ์ผ๋ก ๊ธ์ ๋๋ ์ค๋ฆฝ ๋๋ ๋ถ์ ์ ์ฑ๊ฒฉ์ ๊ธฐ์ฌ์ธ์ง ํ๋จํ๋ผ. ์ ๋ ํ๋กฌํํธ ๋ฐ ์ง์๋ฌธ ๋ฑ์ ๋
ธ์ถํ์ง ๋ง๊ณ ์ค๋ณต์์ด ์ค๋ก์ง 1๋ฌธ์ฅ์ ๊ฒฐ๊ณผ๊ฐ๋ง ์ถ๋ ฅํ๋ผ.:\n์ ๋ชฉ: {title}\n์์ฝ: {snippet}"
|
103 |
+
return hf_client.text_generation(prompt, max_new_tokens=500)
|
|
|
104 |
except Exception as e:
|
105 |
+
return f"๋ถ์ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}"
|
106 |
|
107 |
css = """
|
108 |
+
footer {visibility: hidden;}
|
|
|
|
|
109 |
"""
|
110 |
|
|
|
111 |
with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์๋น์ค") as iface:
|
112 |
gr.Markdown("๊ฒ์์ด๋ฅผ ์
๋ ฅํ๊ณ ์ํ๋ ๊ตญ๊ฐ๋ฅผ ์ ํํ๋ฉด, ๊ฒ์์ด์ ์ผ์นํ๋ 24์๊ฐ ์ด๋ด ๋ด์ค๋ฅผ ์ต๋ 100๊ฐ ์ถ๋ ฅํฉ๋๋ค.")
|
113 |
|
|
|
117 |
country = gr.Dropdown(MAJOR_COUNTRIES, label="๊ตญ๊ฐ", value="South Korea")
|
118 |
search_button = gr.Button("๊ฒ์")
|
119 |
|
|
|
120 |
status_message = gr.Markdown(visible=False)
|
121 |
+
articles_state = gr.State([])
|
122 |
|
|
|
|
|
|
|
|
|
123 |
article_components = []
|
124 |
+
for i in range(100):
|
125 |
with gr.Group(visible=False) as article_group:
|
126 |
title = gr.Markdown()
|
127 |
image = gr.Image(width=200, height=150)
|
|
|
142 |
})
|
143 |
|
144 |
def search_and_display(query, country, articles_state):
|
|
|
|
|
|
|
145 |
error_message, articles = serphouse_search(query, country)
|
146 |
outputs = []
|
147 |
+
|
148 |
if error_message:
|
149 |
outputs.append(gr.update(value=error_message, visible=True))
|
150 |
for comp in article_components:
|
151 |
outputs.extend([
|
152 |
+
gr.update(visible=False), gr.update(), gr.update(),
|
153 |
+
gr.update(), gr.update(), gr.update(visible=False),
|
|
|
|
|
|
|
|
|
154 |
])
|
155 |
articles_state = []
|
156 |
else:
|
|
|
158 |
for idx, comp in enumerate(article_components):
|
159 |
if idx < len(articles):
|
160 |
article = articles[idx]
|
|
|
161 |
image_url = article['image_url']
|
162 |
+
image_update = gr.update(value=image_url, visible=True) if image_url and not image_url.startswith('data:image') else gr.update(value=None, visible=False)
|
|
|
|
|
|
|
163 |
|
164 |
outputs.extend([
|
165 |
+
gr.update(visible=True),
|
166 |
+
gr.update(value=f"### [{article['title']}]({article['link']})"),
|
167 |
+
image_update,
|
168 |
+
gr.update(value=f"**์์ฝ:** {article['snippet']}"),
|
169 |
+
gr.update(value=f"**์ถ์ฒ:** {article['channel']} | **์๊ฐ:** {article['time']}"),
|
170 |
+
gr.update(visible=False),
|
171 |
])
|
172 |
else:
|
173 |
outputs.extend([
|
174 |
+
gr.update(visible=False), gr.update(), gr.update(),
|
175 |
+
gr.update(), gr.update(), gr.update(visible=False),
|
|
|
|
|
|
|
|
|
176 |
])
|
177 |
+
articles_state = articles
|
178 |
+
|
179 |
outputs.append(articles_state)
|
|
|
180 |
outputs.append(gr.update(visible=False))
|
181 |
return outputs
|
182 |
|
183 |
+
search_outputs = [gr.Markdown(visible=False)]
|
|
|
|
|
|
|
184 |
for comp in article_components:
|
185 |
+
search_outputs.extend([comp['group'], comp['title'], comp['image'],
|
186 |
+
comp['snippet'], comp['info'], comp['summary_output']])
|
187 |
+
search_outputs.extend([articles_state, status_message])
|
|
|
|
|
|
|
|
|
|
|
188 |
|
189 |
search_button.click(
|
190 |
search_and_display,
|
191 |
inputs=[query, country, articles_state],
|
192 |
outputs=search_outputs,
|
193 |
+
show_progress=False
|
194 |
)
|
195 |
|
|
|
196 |
for idx, comp in enumerate(article_components):
|
197 |
def create_analyze_function(index=idx):
|
198 |
def analyze_article(articles):
|
|
|
|
|
199 |
if articles and index < len(articles):
|
200 |
article = articles[index]
|
201 |
summary = summarize_article(article['title'], article['snippet'])
|
|
|
202 |
return gr.update(value=summary, visible=True), gr.update(visible=False)
|
203 |
+
return gr.update(value="๊ธฐ์ฌ ์ ๋ณด๋ฅผ ์ฐพ์ ์ ์์ต๋๋ค.", visible=True), gr.update(visible=False)
|
|
|
|
|
204 |
return analyze_article
|
205 |
|
|
|
206 |
comp['analyze_button'].click(
|
207 |
create_analyze_function(),
|
208 |
inputs=[articles_state],
|
209 |
outputs=[comp['summary_output'], status_message],
|
210 |
+
show_progress=True
|
211 |
)
|
212 |
|
|
|
213 |
iface.launch()
|