seawolf2357 committed on
Commit
c57002d
โ€ข
1 Parent(s): 12d3fa4

Update app-backup.py

Browse files
Files changed (1) hide show
  1. app-backup.py +97 -85
app-backup.py CHANGED
@@ -1,3 +1,4 @@
 
1
  import gradio as gr
2
  import requests
3
  import json
@@ -6,25 +7,25 @@ from datetime import datetime, timedelta
6
  API_KEY = "V38CNn4HXpLtynJQyOeoUensTEYoFy8PBUxKpDqAW1pawT1vfJ2BWtPQ98h6"
7
 
8
  MAJOR_COUNTRIES = [
9
- "United States", "United Kingdom", "Canada", "Australia", "Germany",
10
- "France", "Japan", "South Korea", "China", "India",
11
- "Brazil", "Mexico", "Russia", "Italy", "Spain",
12
- "Netherlands", "Sweden", "Switzerland", "Norway", "Denmark",
13
- "Finland", "Belgium", "Austria", "New Zealand", "Ireland",
14
- "Singapore", "Hong Kong", "Israel", "United Arab Emirates", "Saudi Arabia",
15
- "South Africa", "Turkey", "Egypt", "Poland", "Czech Republic",
16
- "Hungary", "Greece", "Portugal", "Argentina", "Chile",
17
- "Colombia", "Peru", "Venezuela", "Thailand", "Malaysia",
18
  "Indonesia", "Philippines", "Vietnam", "Pakistan", "Bangladesh"
19
  ]
20
 
21
  def search_serphouse(query, country, page, num_result):
22
  url = "https://api.serphouse.com/serp/live"
23
-
24
  now = datetime.utcnow()
25
  yesterday = now - timedelta(days=1)
26
  date_range = f"{yesterday.strftime('%Y-%m-%d')},{now.strftime('%Y-%m-%d')}"
27
-
28
  payload = {
29
  "data": {
30
  "q": query,
@@ -39,13 +40,13 @@ def search_serphouse(query, country, page, num_result):
39
  "date_range": date_range
40
  }
41
  }
42
-
43
  headers = {
44
  "accept": "application/json",
45
  "content-type": "application/json",
46
  "authorization": f"Bearer {API_KEY}"
47
  }
48
-
49
  try:
50
  response = requests.post(url, json=payload, headers=headers)
51
  response.raise_for_status()
@@ -56,91 +57,102 @@ def search_serphouse(query, country, page, num_result):
56
  error_msg += f"\nResponse content: {response.text}"
57
  return {"error": error_msg}
58
 
59
- def format_results(results):
60
- all_results = "<h2>๋ชจ๋“  ๋‰ด์Šค ๊ฒฐ๊ณผ (24์‹œ๊ฐ„ ์ด๋‚ด)</h2>"
61
- debug_info = "<h2>๋””๋ฒ„๊ทธ ์ •๋ณด</h2>"
62
-
63
- if isinstance(results, dict) and "error" in results:
64
- all_results += f"<p>์˜ค๋ฅ˜ ๋ฐœ์ƒ: {results['error']}</p>"
65
- debug_info += f"<pre>{results['error']}</pre>"
66
- return all_results, debug_info
67
-
68
- debug_info += f"<pre>{json.dumps(results, indent=2, ensure_ascii=False)}</pre>"
69
-
70
  try:
 
 
 
 
 
71
  if not isinstance(results, dict):
72
  raise ValueError("๊ฒฐ๊ณผ๊ฐ€ ์‚ฌ์ „ ํ˜•์‹์ด ์•„๋‹™๋‹ˆ๋‹ค.")
73
-
74
- if "results" not in results:
75
- raise ValueError("'results' ํ‚ค๊ฐ€ ์‘๋‹ต์— ์—†์Šต๋‹ˆ๋‹ค.")
76
-
77
- news_results = results["results"].get("news", [])
78
- debug_info += f"<p>๋‰ด์Šค ๊ฒฐ๊ณผ ์ˆ˜: {len(news_results)}</p>"
79
-
80
- if not news_results:
81
- all_results += "<p>๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.</p>"
82
  else:
83
- all_results += "<ol>"
84
- for result in news_results:
85
- title = result.get("title", "์ œ๋ชฉ ์—†์Œ")
86
- url = result.get("url", "#")
87
- snippet = result.get("snippet", "๋‚ด์šฉ ์—†์Œ")
88
- channel = result.get("channel", "์•Œ ์ˆ˜ ์—†์Œ")
89
- time_str = result.get("time", "์•Œ ์ˆ˜ ์—†๋Š” ์‹œ๊ฐ„")
90
-
91
- article_info = f"""
92
- <li>
93
- <h3><a href="{url}" target="_blank">{title}</a></h3>
94
- <p>{snippet}</p>
95
- <p><strong>์ถœ์ฒ˜:</strong> {channel} - {time_str}</p>
96
- </li>
97
- """
98
- all_results += article_info
99
-
100
- all_results += "</ol>"
101
-
 
 
 
 
 
 
 
 
 
 
 
 
 
102
  except Exception as e:
103
  error_message = f"๊ฒฐ๊ณผ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
104
- debug_info += f"<p>{error_message}</p>"
105
- all_results += f"<p>{error_message}</p>"
106
-
107
- return all_results, debug_info
108
 
109
  def serphouse_search(query, country, page, num_result):
110
  results = search_serphouse(query, country, page, num_result)
111
- all_results, debug_info = format_results(results)
112
- return all_results, debug_info
113
 
114
  css = """
115
  footer {
116
  visibility: hidden;
117
  }
118
- ol {
119
- padding-left: 20px;
120
- }
121
- li {
122
- margin-bottom: 20px;
123
- }
124
  """
125
 
126
- iface = gr.Interface(
127
- fn=serphouse_search,
128
- inputs=[
129
- gr.Textbox(label="๊ฒ€์ƒ‰์–ด"),
130
- gr.Dropdown(MAJOR_COUNTRIES, label="๊ตญ๊ฐ€"),
131
- gr.Slider(1, 10, 1, label="ํŽ˜์ด์ง€"),
132
- gr.Slider(1, 100, 10, label="๊ฒฐ๊ณผ ์ˆ˜")
133
- ],
134
- outputs=[
135
- gr.HTML(label="๋ชจ๋“  ๊ฒฐ๊ณผ"),
136
- gr.HTML(label="๋””๋ฒ„๊ทธ ์ •๋ณด")
137
- ],
138
-
139
-
140
- title="24์‹œ๊ฐ„ ์ด๋‚ด ๋‰ด์Šค ๊ฒ€์ƒ‰ ์ธํ„ฐํŽ˜์ด์Šค",
141
- description="๊ฒ€์ƒ‰์–ด๋ฅผ ์ž…๋ ฅํ•˜๊ณ  ๊ตญ๊ฐ€๋ฅผ ์„ ํƒํ•˜์—ฌ 24์‹œ๊ฐ„ ์ด๋‚ด์˜ ๋‰ด์Šค ๊ฒฐ๊ณผ๋ฅผ ๊ฐ€์ ธ์˜ต๋‹ˆ๋‹ค.",
142
- theme="Nymbo/Nymbo_Theme",
143
- css=css
144
- )
145
-
146
- iface.launch(auth=("gini","pick"))
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
  import gradio as gr
3
  import requests
4
  import json
 
7
# SERPHouse API credential.
# NOTE(review): secret is hard-coded and committed to source — it should be
# read from an environment variable / secret store, and this exposed key
# should be rotated.
API_KEY = "V38CNn4HXpLtynJQyOeoUensTEYoFy8PBUxKpDqAW1pawT1vfJ2BWtPQ98h6"

# Country choices shown in the UI dropdown; the selected value is passed
# through to the SERPHouse search request (presumably as its location
# parameter — the payload line is outside this view, confirm against
# search_serphouse).
MAJOR_COUNTRIES = [
    "United States", "United Kingdom", "Canada", "Australia", "Germany",
    "France", "Japan", "South Korea", "China", "India",
    "Brazil", "Mexico", "Russia", "Italy", "Spain",
    "Netherlands", "Sweden", "Switzerland", "Norway", "Denmark",
    "Finland", "Belgium", "Austria", "New Zealand", "Ireland",
    "Singapore", "Hong Kong", "Israel", "United Arab Emirates", "Saudi Arabia",
    "South Africa", "Turkey", "Egypt", "Poland", "Czech Republic",
    "Hungary", "Greece", "Portugal", "Argentina", "Chile",
    "Colombia", "Peru", "Venezuela", "Thailand", "Malaysia",
    "Indonesia", "Philippines", "Vietnam", "Pakistan", "Bangladesh"
]
21
 
22
  def search_serphouse(query, country, page, num_result):
23
  url = "https://api.serphouse.com/serp/live"
24
+
25
  now = datetime.utcnow()
26
  yesterday = now - timedelta(days=1)
27
  date_range = f"{yesterday.strftime('%Y-%m-%d')},{now.strftime('%Y-%m-%d')}"
28
+
29
  payload = {
30
  "data": {
31
  "q": query,
 
40
  "date_range": date_range
41
  }
42
  }
43
+
44
  headers = {
45
  "accept": "application/json",
46
  "content-type": "application/json",
47
  "authorization": f"Bearer {API_KEY}"
48
  }
49
+
50
  try:
51
  response = requests.post(url, json=payload, headers=headers)
52
  response.raise_for_status()
 
57
  error_msg += f"\nResponse content: {response.text}"
58
  return {"error": error_msg}
59
 
60
def format_results_from_raw(results):
    """Render a raw SERPHouse live-search response as article HTML.

    Args:
        results: Parsed JSON response (dict) from search_serphouse, or an
            {"error": ...} dict produced when the request failed.

    Returns:
        (html, debug_info): a block of article HTML (or an error / "no
        results" message string) and a plain-text dump of the raw response
        for the debug panel. Never raises — any processing error is folded
        into the returned strings.
    """
    import html as _html  # stdlib; used to escape untrusted API text

    try:
        debug_info = f"Raw API Response:\n{json.dumps(results, indent=2, ensure_ascii=False)}"

        if isinstance(results, dict) and "error" in results:
            return "Error: " + results["error"], debug_info

        if not isinstance(results, dict):
            raise ValueError("๊ฒฐ๊ณผ๊ฐ€ ์‚ฌ์ „ ํ˜•์‹์ด ์•„๋‹™๋‹ˆ๋‹ค.")

        # SERPHouse nests articles under results -> news; tolerate either
        # key being absent (or the news list being null).
        news_results = []
        if "results" in results:
            news_results = results["results"].get("news") or []

        if not news_results:
            return "๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.", debug_info

        formatted_articles = ""
        for result in news_results:
            # The API is inconsistent about field names, so fall back to the
            # known alternates for each attribute.
            title = result.get("title", "์ œ๋ชฉ ์—†์Œ")
            link = result.get("url", result.get("link", "#"))
            snippet = result.get("snippet", "๋‚ด์šฉ ์—†์Œ")
            channel = result.get("channel", result.get("source", "์•Œ ์ˆ˜ ์—†์Œ"))
            time_str = result.get("time", result.get("date", "์•Œ ์ˆ˜ ์—†๋Š” ์‹œ๊ฐ„"))
            image_url = result.get("img", result.get("thumbnail", ""))

            # Skip base64-inlined images; only render real image URLs.
            if image_url and not image_url.startswith("data:image"):
                image_html = f'<img src="{_html.escape(image_url, quote=True)}" alt="Image" style="max-width: 100%; height: auto;">'
            else:
                image_html = ''

            # Escape every API-supplied value before interpolating it into
            # HTML, so a hostile search result cannot inject markup/script.
            article_html = f"""
            <div style="margin-bottom: 20px; border-bottom: 1px solid #ccc; padding-bottom: 20px;">
                <h3><a href="{_html.escape(link, quote=True)}" target="_blank">{_html.escape(title)}</a></h3>
                <p><strong>{_html.escape(channel)}</strong> - {_html.escape(time_str)}</p>
                {image_html}
                <p>{_html.escape(snippet)}</p>
            </div>
            """
            formatted_articles += article_html

        return formatted_articles, debug_info

    except Exception as e:
        error_message = f"๊ฒฐ๊ณผ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}"
        debug_info = f"Error: {error_message}\n"
        return "Error: " + error_message, debug_info
 
 
115
 
116
def serphouse_search(query, country, page, num_result):
    """Run a SERPHouse news search and format it for display.

    Returns a (formatted_html, debug_info) pair suitable for the UI's
    news and debug output components.
    """
    raw_response = search_serphouse(query, country, page, num_result)
    return format_results_from_raw(raw_response)
120
 
121
css = """
footer {
    visibility: hidden;
}
"""

# Build the Gradio UI: one tab for entering the search, plus separate tabs
# for the rendered news HTML and the raw debug dump.
with gr.Blocks(css=css, title="24์‹œ๊ฐ„ ์ด๋‚ด ๋‰ด์Šค ๊ฒ€์ƒ‰ ์ธํ„ฐํŽ˜์ด์Šค") as iface:
    gr.Markdown("## 24์‹œ๊ฐ„ ์ด๋‚ด ๋‰ด์Šค ๊ฒ€์ƒ‰ ์ธํ„ฐํŽ˜์ด์Šค")
    gr.Markdown("๊ฒ€์ƒ‰์–ด๋ฅผ ์ž…๋ ฅํ•˜๊ณ  ๊ตญ๊ฐ€๋ฅผ ์„ ํƒํ•˜์—ฌ 24์‹œ๊ฐ„ ์ด๋‚ด์˜ ๋‰ด์Šค ๊ฒฐ๊ณผ๋ฅผ ๊ฐ€์ ธ์˜ต๋‹ˆ๋‹ค.")

    with gr.Tab("๊ฒ€์ƒ‰"):
        # Query inputs: term + country on one row, paging controls below.
        with gr.Row():
            query = gr.Textbox(label="๊ฒ€์ƒ‰์–ด")
            country = gr.Dropdown(MAJOR_COUNTRIES, label="๊ตญ๊ฐ€", value="South Korea")
        with gr.Row():
            page = gr.Slider(1, 10, 1, label="ํŽ˜์ด์ง€")
            num_result = gr.Slider(1, 100, 10, label="๊ฒฐ๊ณผ ์ˆ˜")
        search_button = gr.Button("๊ฒ€์ƒ‰")

    with gr.Tab("๋‰ด์Šค ๊ฒฐ๊ณผ"):
        news_output = gr.HTML(label="๋‰ด์Šค ๊ฒฐ๊ณผ")

    with gr.Tab("๋””๋ฒ„๊ทธ ์ •๋ณด"):
        debug_output = gr.Textbox(label="๋””๋ฒ„๊ทธ ์ •๋ณด", lines=10)

    def _on_search(query, country, page, num_result):
        # Route results to their components by explicit mapping.
        articles, debug_text = serphouse_search(query, country, page, num_result)
        return {news_output: articles, debug_output: debug_text}

    search_button.click(
        _on_search,
        inputs=[query, country, page, num_result],
        outputs=[news_output, debug_output]
    )

iface.launch(auth=("gini", "pick"))