seawolf2357
committed on
Update app-backup.py
Browse files- app-backup.py +91 -70
app-backup.py
CHANGED
@@ -1,9 +1,17 @@
|
|
1 |
import gradio as gr
|
2 |
import requests
|
3 |
import json
|
|
|
4 |
from datetime import datetime, timedelta
|
|
|
|
|
5 |
|
6 |
-
|
|
|
|
|
|
|
|
|
|
|
7 |
|
8 |
MAJOR_COUNTRIES = [
|
9 |
"United States", "United Kingdom", "Canada", "Australia", "Germany",
|
@@ -18,7 +26,7 @@ MAJOR_COUNTRIES = [
|
|
18 |
"Indonesia", "Philippines", "Vietnam", "Pakistan", "Bangladesh"
|
19 |
]
|
20 |
|
21 |
-
def search_serphouse(query, country, page=1, num_result=
|
22 |
url = "https://api.serphouse.com/serp/live"
|
23 |
|
24 |
now = datetime.utcnow()
|
@@ -58,11 +66,8 @@ def search_serphouse(query, country, page=1, num_result=100):
|
|
58 |
|
59 |
def format_results_from_raw(results):
|
60 |
try:
|
61 |
-
# ๋๋ฒ๊ทธ ์ ๋ณด ์๋ต
|
62 |
-
debug_info = ""
|
63 |
-
|
64 |
if isinstance(results, dict) and "error" in results:
|
65 |
-
return "Error: " + results["error"],
|
66 |
|
67 |
if not isinstance(results, dict):
|
68 |
raise ValueError("๊ฒฐ๊ณผ๊ฐ ์ฌ์ ํ์์ด ์๋๋๋ค.")
|
@@ -83,10 +88,9 @@ def format_results_from_raw(results):
|
|
83 |
news_results = []
|
84 |
|
85 |
if not news_results:
|
86 |
-
return "๊ฒ์ ๊ฒฐ๊ณผ๊ฐ ์์ต๋๋ค.",
|
87 |
|
88 |
-
|
89 |
-
list_output = ""
|
90 |
|
91 |
for idx, result in enumerate(news_results, 1):
|
92 |
title = result.get("title", "์ ๋ชฉ ์์")
|
@@ -96,86 +100,103 @@ def format_results_from_raw(results):
|
|
96 |
time = result.get("time", result.get("date", "์ ์ ์๋ ์๊ฐ"))
|
97 |
image_url = result.get("img", result.get("thumbnail", ""))
|
98 |
|
99 |
-
|
100 |
-
|
101 |
-
|
102 |
-
|
103 |
-
|
104 |
-
|
105 |
-
|
106 |
-
|
107 |
-
|
108 |
-
|
109 |
-
<p>{thumbnail_html}</p>
|
110 |
-
<p>์์ฝ: {snippet}</p>
|
111 |
-
<p>์ถ์ฒ: {channel} | ์๊ฐ: {time}</p>
|
112 |
-
<hr>
|
113 |
-
</div>
|
114 |
-
"""
|
115 |
-
list_output += list_item
|
116 |
-
|
117 |
-
return list_output, ""
|
118 |
|
119 |
except Exception as e:
|
120 |
error_message = f"๊ฒฐ๊ณผ ์ฒ๋ฆฌ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}"
|
121 |
-
return "Error: " + error_message,
|
122 |
|
123 |
def serphouse_search(query, country):
|
124 |
# ํ์ด์ง์ ๊ฒฐ๊ณผ ์์ ๊ธฐ๋ณธ๊ฐ์ ์ค์ ํฉ๋๋ค.
|
125 |
page = 1
|
126 |
-
num_result =
|
127 |
results = search_serphouse(query, country, page, num_result)
|
128 |
-
|
129 |
-
return
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
130 |
|
131 |
css = """
|
132 |
footer {
|
133 |
visibility: hidden;
|
134 |
}
|
135 |
-
/* '๋ด์ค ๊ฒฐ๊ณผ'์ '๋๋ฒ๊ทธ ์ ๋ณด' ํญ ์จ๊ธฐ๊ธฐ */
|
136 |
-
#tab-๋ด์ค_๊ฒฐ๊ณผ, #tab-๋๋ฒ๊ทธ_์ ๋ณด {
|
137 |
-
display: none !important;
|
138 |
-
}
|
139 |
-
/* 'ํ์ด์ง'์ '๊ฒฐ๊ณผ ์' ์
๋ ฅ ์์ ์จ๊ธฐ๊ธฐ */
|
140 |
-
.slider-container {
|
141 |
-
display: none !important;
|
142 |
-
}
|
143 |
"""
|
144 |
|
145 |
# Gradio ์ธํฐํ์ด์ค ๊ตฌ์ฑ
|
146 |
-
with gr.Blocks(
|
147 |
-
gr.Markdown("
|
148 |
-
gr.Markdown("๊ฒ์์ด๋ฅผ ์
๋ ฅํ๊ณ ๊ตญ๊ฐ๋ฅผ ์ ํํ์ฌ 24์๊ฐ ์ด๋ด์ ๋ด์ค ๊ฒฐ๊ณผ๋ฅผ ๊ฐ์ ธ์ต๋๋ค.")
|
149 |
|
150 |
-
with gr.
|
151 |
with gr.Row():
|
152 |
query = gr.Textbox(label="๊ฒ์์ด")
|
153 |
country = gr.Dropdown(MAJOR_COUNTRIES, label="๊ตญ๊ฐ", value="South Korea")
|
154 |
-
|
155 |
-
# with gr.Row():
|
156 |
-
# page = gr.Slider(1, 10, 1, label="ํ์ด์ง")
|
157 |
-
# num_result = gr.Slider(1, 100, 100, label="๊ฒฐ๊ณผ ์")
|
158 |
-
|
159 |
-
search_button = gr.Button("๊ฒ์")
|
160 |
-
|
161 |
-
# '๋ด์ค ๊ฒฐ๊ณผ'์ '๋๋ฒ๊ทธ ์ ๋ณด' ํญ ์ ๊ฑฐ
|
162 |
-
# with gr.Tab("๋ด์ค ๊ฒฐ๊ณผ"):
|
163 |
-
# news_output = gr.HTML(label="๋ด์ค ๊ฒฐ๊ณผ")
|
164 |
|
165 |
-
|
166 |
-
list_output = gr.HTML(label="๋ฆฌ์คํธ ๊ฒฐ๊ณผ") # HTML๋ก ๋ณ๊ฒฝ
|
167 |
|
168 |
-
|
169 |
-
|
170 |
-
|
171 |
-
|
172 |
-
|
173 |
-
|
174 |
-
|
175 |
-
|
176 |
-
|
177 |
-
|
178 |
-
|
179 |
-
|
180 |
-
|
181 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
import gradio as gr
|
2 |
import requests
|
3 |
import json
|
4 |
+
import os
|
5 |
from datetime import datetime, timedelta
|
6 |
+
from bs4 import BeautifulSoup # ์น ํ์ด์ง์์ ํ
์คํธ๋ฅผ ์ถ์ถํ๊ธฐ ์ํด ์ฌ์ฉ
|
7 |
+
from huggingface_hub import InferenceClient # LLM ์ฌ์ฉ์ ์ํด ํ์
|
8 |
|
9 |
+
# ํ์ํ ํจํค์ง ์ค์น (ํ์ํ ๊ฒฝ์ฐ ์ฃผ์์ ์ ๊ฑฐํ๊ณ ์คํ)
|
10 |
+
# !pip install bs4 huggingface_hub
|
11 |
+
|
12 |
+
# ํ๊ฒฝ ๋ณ์์์ API ํค ๊ฐ์ ธ์ค๊ธฐ (API ํค๋ ์์ ํ๊ฒ ๊ด๋ฆฌ๋์ด์ผ ํฉ๋๋ค)
|
13 |
+
API_KEY = os.getenv("SERPHOUSE_API_KEY") # ๋ณธ์ธ์ SerpHouse API ํค๋ฅผ ํ๊ฒฝ ๋ณ์๋ก ์ค์ ํ์ธ์.
|
14 |
+
HF_TOKEN = os.getenv("HF_TOKEN") # Hugging Face API ํ ํฐ์ ํ๊ฒฝ ๋ณ์๋ก ์ค์ ํ์ธ์.
|
15 |
|
16 |
MAJOR_COUNTRIES = [
|
17 |
"United States", "United Kingdom", "Canada", "Australia", "Germany",
|
|
|
26 |
"Indonesia", "Philippines", "Vietnam", "Pakistan", "Bangladesh"
|
27 |
]
|
28 |
|
29 |
+
def search_serphouse(query, country, page=1, num_result=10):
|
30 |
url = "https://api.serphouse.com/serp/live"
|
31 |
|
32 |
now = datetime.utcnow()
|
|
|
66 |
|
67 |
def format_results_from_raw(results):
|
68 |
try:
|
|
|
|
|
|
|
69 |
if isinstance(results, dict) and "error" in results:
|
70 |
+
return "Error: " + results["error"], []
|
71 |
|
72 |
if not isinstance(results, dict):
|
73 |
raise ValueError("๊ฒฐ๊ณผ๊ฐ ์ฌ์ ํ์์ด ์๋๋๋ค.")
|
|
|
88 |
news_results = []
|
89 |
|
90 |
if not news_results:
|
91 |
+
return "๊ฒ์ ๊ฒฐ๊ณผ๊ฐ ์์ต๋๋ค.", []
|
92 |
|
93 |
+
articles = []
|
|
|
94 |
|
95 |
for idx, result in enumerate(news_results, 1):
|
96 |
title = result.get("title", "์ ๋ชฉ ์์")
|
|
|
100 |
time = result.get("time", result.get("date", "์ ์ ์๋ ์๊ฐ"))
|
101 |
image_url = result.get("img", result.get("thumbnail", ""))
|
102 |
|
103 |
+
articles.append({
|
104 |
+
"title": title,
|
105 |
+
"link": link,
|
106 |
+
"snippet": snippet,
|
107 |
+
"channel": channel,
|
108 |
+
"time": time,
|
109 |
+
"image_url": image_url
|
110 |
+
})
|
111 |
+
|
112 |
+
return "", articles
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
113 |
|
114 |
except Exception as e:
|
115 |
error_message = f"๊ฒฐ๊ณผ ์ฒ๋ฆฌ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}"
|
116 |
+
return "Error: " + error_message, []
|
117 |
|
118 |
def serphouse_search(query, country):
|
119 |
# ํ์ด์ง์ ๊ฒฐ๊ณผ ์์ ๊ธฐ๋ณธ๊ฐ์ ์ค์ ํฉ๋๋ค.
|
120 |
page = 1
|
121 |
+
num_result = 10
|
122 |
results = search_serphouse(query, country, page, num_result)
|
123 |
+
error_message, articles = format_results_from_raw(results)
|
124 |
+
return error_message, articles
|
125 |
+
|
126 |
+
# LLM setup
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=HF_TOKEN)

def summarize_article(url):
    """Fetch the article at `url`, extract its paragraph text and return a
    Korean 3-sentence summary generated by the LLM.

    Never raises: any failure (network, parsing, inference) is returned as a
    human-readable error string so the UI can display it directly.
    """
    try:
        # Extract text from the web page. Bound the request — the original
        # had no timeout, so a dead/slow server would hang the UI handler
        # indefinitely.
        response = requests.get(url, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')
        # Simple extraction: concatenate the text of all <p> elements.
        text = ' '.join(p.get_text() for p in soup.find_all('p'))
        if not text.strip():
            return "๊ธฐ์ฌ ๋ด์ฉ์ ๊ฐ์ ธ์ฌ ์ ์์ต๋๋ค."

        # Generate the summary via the hosted LLM.
        prompt = f"๋ค์ ์์ด ๊ธฐ์ฌ๋ฅผ ํ๊ตญ์ด๋ก 3๋ฌธ์ฅ์ผ๋ก ์์ฝํ์ธ์:\n{text}"
        summary = hf_client.text_generation(prompt, max_new_tokens=500)
        return summary
    except Exception as e:
        return f"์์ฝ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}"
|
146 |
|
147 |
css = """
footer {
    visibility: hidden;
}
"""

# Maximum number of article slots rendered in the UI; matches the
# num_result default used by serphouse_search.
MAX_ARTICLES = 10

# Gradio interface.
# NOTE(fix): the original created components *inside* the search callback and
# called `component.update(...)` as a method — neither works in Gradio.
# Components must be created at build time, and updates must be *returned*
# from the handler and routed through `outputs=`. We therefore pre-create a
# fixed set of initially-hidden article slots and fill them per search.
with gr.Blocks(css=css, title="NewsAI ์๋น์ค") as iface:
    gr.Markdown("๊ฒ์์ด๋ฅผ ์๋ ฅํ๊ณ ์ํ๋ ๊ตญ๊ฐ๋ฅผ ์ ํํ๋ฉด, ๊ฒ์์ด์ ์ผ์นํ๋ 24์๊ฐ ์ด๋ด ๋ด์ค๋ฅผ ์ต๋ 10๊ฐ ์ถ๋ ฅํฉ๋๋ค.")

    with gr.Column():
        with gr.Row():
            query = gr.Textbox(label="๊ฒ์์ด")
            country = gr.Dropdown(MAJOR_COUNTRIES, label="๊ตญ๊ฐ", value="South Korea")
        search_button = gr.Button("๊ฒ์")

        # Shown only when the search returns an error message.
        status_output = gr.Markdown(visible=False)

        # Pre-created, initially hidden slots for up to MAX_ARTICLES results.
        # Each slot: (column, title, image, snippet, source/time, link state,
        # summary). The per-slot analyze button is wired at build time.
        article_slots = []
        for _ in range(MAX_ARTICLES):
            with gr.Column(visible=False) as slot_col:
                title_md = gr.Markdown()
                # shape= was removed from gr.Image; size is left to CSS/theme.
                image = gr.Image(visible=False)
                snippet_md = gr.Markdown()
                info_md = gr.Markdown()
                link_state = gr.State("")
                analyze_button = gr.Button("๋ถ์")
                summary_md = gr.Markdown(visible=False)
                # Summarize this slot's article and reveal the summary box.
                analyze_button.click(
                    lambda url: gr.update(value=summarize_article(url), visible=True),
                    inputs=link_state,
                    outputs=summary_md,
                )
            article_slots.append(
                (slot_col, title_md, image, snippet_md, info_md, link_state, summary_md)
            )

    def search_and_display(query, country):
        """Run the search and return updates for the status line and every slot."""
        error_message, articles = serphouse_search(query, country)
        updates = [
            gr.update(visible=bool(error_message), value=error_message or "")
        ]
        for i in range(MAX_ARTICLES):
            if not error_message and i < len(articles):
                article = articles[i]
                updates.extend([
                    gr.update(visible=True),  # slot column
                    gr.update(value=f"### [{article['title']}]({article['link']})"),
                    gr.update(value=article['image_url'], visible=bool(article['image_url'])),
                    gr.update(value=f"**์์ฝ:** {article['snippet']}"),
                    gr.update(value=f"**์ถ์ฒ:** {article['channel']} | **์๊ฐ:** {article['time']}"),
                    article['link'],                       # state carries the URL
                    gr.update(visible=False, value=""),    # reset old summary
                ])
            else:
                # Hide and clear unused slots from a previous search.
                updates.extend([
                    gr.update(visible=False),
                    gr.update(value=""),
                    gr.update(visible=False),
                    gr.update(value=""),
                    gr.update(value=""),
                    "",
                    gr.update(visible=False, value=""),
                ])
        return updates

    # Flatten all slot components so they line up with the handler's returns.
    all_outputs = [status_output]
    for slot in article_slots:
        all_outputs.extend(slot)

    search_button.click(
        search_and_display,
        inputs=[query, country],
        outputs=all_outputs,
    )

iface.launch(auth=("gini", "pick"))
|