seawolf2357 commited on
Commit
7ff2d0a
ยท
verified ยท
1 Parent(s): 5313f78

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -63
app.py CHANGED
@@ -3,37 +3,18 @@ import requests
3
  import json
4
  import os
5
  from datetime import datetime, timedelta
6
- from huggingface_hub import InferenceClient
7
 
8
  API_KEY = os.getenv("SERPHOUSE_API_KEY")
9
- HF_TOKEN = os.getenv("HF_TOKEN")
10
-
11
- COUNTRY_DOMAINS = {
12
- "United States": "google.com",
13
- "United Kingdom": "google.co.uk",
14
- "Canada": "google.ca",
15
- "Australia": "google.com.au",
16
- "Germany": "google.de",
17
- "France": "google.fr",
18
- "Japan": "google.co.jp",
19
- "South Korea": "google.co.kr",
20
- "China": "google.com.hk",
21
- "India": "google.co.in",
22
- "Brazil": "google.com.br",
23
- "Mexico": "google.com.mx",
24
- "Russia": "google.ru",
25
- "Italy": "google.it",
26
- "Spain": "google.es",
27
- "Netherlands": "google.nl",
28
- "Singapore": "google.com.sg",
29
- "Hong Kong": "google.com.hk"
30
- }
31
-
32
- MAJOR_COUNTRIES = list(COUNTRY_DOMAINS.keys())
33
 
34
  def search_serphouse(query, country, page=1, num_result=100):
35
  url = "https://api.serphouse.com/serp/live"
36
- domain = COUNTRY_DOMAINS.get(country, "google.com")
37
 
38
  now = datetime.utcnow()
39
  yesterday = now - timedelta(days=1)
@@ -42,8 +23,8 @@ def search_serphouse(query, country, page=1, num_result=100):
42
  payload = {
43
  "data": {
44
  "q": query,
45
- "domain": domain,
46
- "loc": country,
47
  "lang": "en",
48
  "device": "desktop",
49
  "serp_type": "news",
@@ -95,15 +76,6 @@ def serphouse_search(query, country):
95
  results = search_serphouse(query, country)
96
  return format_results_from_raw(results)
97
 
98
- hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=HF_TOKEN)
99
-
100
- def summarize_article(title, snippet):
101
- try:
102
- prompt = f"๋‹ค์Œ ๋‰ด์Šค ์ œ๋ชฉ๊ณผ ์š”์•ฝ์„ ๋ฐ”ํƒ•์œผ๋กœ ํ•œ๊ตญ์–ด๋กœ 1๋ฌธ์žฅ์œผ๋กœ ๊ธ์ • ๋˜๋Š” ์ค‘๋ฆฝ ๋˜๋Š” ๋ถ€์ •์  ์„ฑ๊ฒฉ์˜ ๊ธฐ์‚ฌ์ธ์ง€ ํŒ๋‹จํ•˜๋ผ. ์ ˆ๋Œ€ ํ”„๋กฌํ”„ํŠธ ๋ฐ ์ง€์‹œ๋ฌธ ๋“ฑ์„ ๋…ธ์ถœํ•˜์ง€ ๋ง๊ณ  ์ค‘๋ณต์—†์ด ์˜ค๋กœ์ง€ 1๋ฌธ์žฅ์˜ ๊ฒฐ๊ณผ๊ฐ’๋งŒ ์ถœ๋ ฅํ•˜๋ผ.:\n์ œ๋ชฉ: {title}\n์š”์•ฝ: {snippet}"
103
- return hf_client.text_generation(prompt, max_new_tokens=500)
104
- except Exception as e:
105
- return f"๋ถ„์„ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
106
-
107
  css = """
108
  footer {visibility: hidden;}
109
  """
@@ -127,8 +99,6 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์„œ๋น„์Šค") as
127
  image = gr.Image(width=200, height=150)
128
  snippet = gr.Markdown()
129
  info = gr.Markdown()
130
- analyze_button = gr.Button("๋ถ„์„")
131
- summary_output = gr.Markdown(visible=False)
132
 
133
  article_components.append({
134
  'group': article_group,
@@ -136,8 +106,6 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์„œ๋น„์Šค") as
136
  'image': image,
137
  'snippet': snippet,
138
  'info': info,
139
- 'analyze_button': analyze_button,
140
- 'summary_output': summary_output,
141
  'index': i,
142
  })
143
 
@@ -150,7 +118,7 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์„œ๋น„์Šค") as
150
  for comp in article_components:
151
  outputs.extend([
152
  gr.update(visible=False), gr.update(), gr.update(),
153
- gr.update(), gr.update(), gr.update(visible=False),
154
  ])
155
  articles_state = []
156
  else:
@@ -166,13 +134,12 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์„œ๋น„์Šค") as
166
  gr.update(value=f"### [{article['title']}]({article['link']})"),
167
  image_update,
168
  gr.update(value=f"**์š”์•ฝ:** {article['snippet']}"),
169
- gr.update(value=f"**์ถœ์ฒ˜:** {article['channel']} | **์‹œ๊ฐ„:** {article['time']}"),
170
- gr.update(visible=False),
171
  ])
172
  else:
173
  outputs.extend([
174
  gr.update(visible=False), gr.update(), gr.update(),
175
- gr.update(), gr.update(), gr.update(visible=False),
176
  ])
177
  articles_state = articles
178
 
@@ -183,7 +150,7 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์„œ๋น„์Šค") as
183
  search_outputs = [gr.Markdown(visible=False)]
184
  for comp in article_components:
185
  search_outputs.extend([comp['group'], comp['title'], comp['image'],
186
- comp['snippet'], comp['info'], comp['summary_output']])
187
  search_outputs.extend([articles_state, status_message])
188
 
189
  search_button.click(
@@ -193,21 +160,4 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์„œ๋น„์Šค") as
193
  show_progress=False
194
  )
195
 
196
- for idx, comp in enumerate(article_components):
197
- def create_analyze_function(index=idx):
198
- def analyze_article(articles):
199
- if articles and index < len(articles):
200
- article = articles[index]
201
- summary = summarize_article(article['title'], article['snippet'])
202
- return gr.update(value=summary, visible=True), gr.update(visible=False)
203
- return gr.update(value="๊ธฐ์‚ฌ ์ •๋ณด๋ฅผ ์ฐพ์„ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.", visible=True), gr.update(visible=False)
204
- return analyze_article
205
-
206
- comp['analyze_button'].click(
207
- create_analyze_function(),
208
- inputs=[articles_state],
209
- outputs=[comp['summary_output'], status_message],
210
- show_progress=True
211
- )
212
-
213
  iface.launch()
 
3
  import json
4
  import os
5
  from datetime import datetime, timedelta
 
6
 
7
  API_KEY = os.getenv("SERPHOUSE_API_KEY")
8
+
9
+ MAJOR_COUNTRIES = [
10
+ "United States", "United Kingdom", "Canada", "Australia", "Germany",
11
+ "France", "Japan", "South Korea", "China", "India",
12
+ "Brazil", "Mexico", "Russia", "Italy", "Spain",
13
+ "Netherlands", "Singapore", "Hong Kong"
14
+ ]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
15
 
16
  def search_serphouse(query, country, page=1, num_result=100):
17
  url = "https://api.serphouse.com/serp/live"
 
18
 
19
  now = datetime.utcnow()
20
  yesterday = now - timedelta(days=1)
 
23
  payload = {
24
  "data": {
25
  "q": query,
26
+ "domain": "google.com", # ๊ธฐ๋ณธ ๋„๋ฉ”์ธ ์‚ฌ์šฉ
27
+ "loc": country, # ๊ตญ๊ฐ€ ์ด๋ฆ„์„ location์œผ๋กœ ์‚ฌ์šฉ
28
  "lang": "en",
29
  "device": "desktop",
30
  "serp_type": "news",
 
76
  results = search_serphouse(query, country)
77
  return format_results_from_raw(results)
78
 
 
 
 
 
 
 
 
 
 
79
  css = """
80
  footer {visibility: hidden;}
81
  """
 
99
  image = gr.Image(width=200, height=150)
100
  snippet = gr.Markdown()
101
  info = gr.Markdown()
 
 
102
 
103
  article_components.append({
104
  'group': article_group,
 
106
  'image': image,
107
  'snippet': snippet,
108
  'info': info,
 
 
109
  'index': i,
110
  })
111
 
 
118
  for comp in article_components:
119
  outputs.extend([
120
  gr.update(visible=False), gr.update(), gr.update(),
121
+ gr.update(), gr.update()
122
  ])
123
  articles_state = []
124
  else:
 
134
  gr.update(value=f"### [{article['title']}]({article['link']})"),
135
  image_update,
136
  gr.update(value=f"**์š”์•ฝ:** {article['snippet']}"),
137
+ gr.update(value=f"**์ถœ์ฒ˜:** {article['channel']} | **์‹œ๊ฐ„:** {article['time']}")
 
138
  ])
139
  else:
140
  outputs.extend([
141
  gr.update(visible=False), gr.update(), gr.update(),
142
+ gr.update(), gr.update()
143
  ])
144
  articles_state = articles
145
 
 
150
  search_outputs = [gr.Markdown(visible=False)]
151
  for comp in article_components:
152
  search_outputs.extend([comp['group'], comp['title'], comp['image'],
153
+ comp['snippet'], comp['info']])
154
  search_outputs.extend([articles_state, status_message])
155
 
156
  search_button.click(
 
160
  show_progress=False
161
  )
162
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
163
  iface.launch()