# Source: Hugging Face Space openfree/MoneyRadar — app-backup1.py
# (Renamed from app.py; commit 7308cb8. The HF "raw" page header that was
# pasted above this file was not valid Python and has been commented out.)
import gradio as gr
import requests
import json
import os
from datetime import datetime, timedelta
from huggingface_hub import InferenceClient

# SERPHouse API key and the HF inference client used for query translation;
# both credentials come from environment variables.
API_KEY = os.getenv("SERPHOUSE_API_KEY")
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
# Target-language code per country, used when translating the search query.
# Countries absent from this map (e.g. the US/UK) are searched in English.
COUNTRY_LANGUAGES = {
"South Korea": "ko",
"Japan": "ja",
"China": "zh",
"Russia": "ru",
"France": "fr",
"Germany": "de",
"Spain": "es",
"Italy": "it",
"Netherlands": "nl",
"Portugal": "pt",
"Thailand": "th",
"Vietnam": "vi",
"Indonesia": "id",
"Malaysia": "ms",
"Saudi Arabia": "ar",
"United Arab Emirates": "ar",
"Egypt": "ar",
"Morocco": "ar",
"Greece": "el",
"Poland": "pl",
"Czech Republic": "cs",
"Hungary": "hu",
"Turkey": "tr",
"Romania": "ro",
"Bulgaria": "bg",
"Croatia": "hr",
"Serbia": "sr",
"Slovakia": "sk",
"Slovenia": "sl",
"Estonia": "et",
"Latvia": "lv",
"Lithuania": "lt"
}
# SERPHouse `loc` value per supported country; search_serphouse falls back
# to "United States" for anything not listed here.
COUNTRY_LOCATIONS = {
"United States": "United States",
"United Kingdom": "United Kingdom",
"Canada": "Canada",
"Australia": "Australia",
"Germany": "Germany",
"France": "France",
"Japan": "Japan",
"South Korea": "South Korea",
"China": "China",
"India": "India",
"Brazil": "Brazil",
"Mexico": "Mexico",
"Russia": "Russia",
"Italy": "Italy",
"Spain": "Spain",
"Netherlands": "Netherlands",
"Singapore": "Singapore",
"Hong Kong": "Hong Kong",
"Indonesia": "Indonesia",
"Malaysia": "Malaysia",
"Philippines": "Philippines",
"Thailand": "Thailand",
"Vietnam": "Vietnam",
"Belgium": "Belgium",
"Denmark": "Denmark",
"Finland": "Finland",
"Ireland": "Ireland",
"Norway": "Norway",
"Poland": "Poland",
"Sweden": "Sweden",
"Switzerland": "Switzerland",
"Austria": "Austria",
"Czech Republic": "Czech Republic",
"Greece": "Greece",
"Hungary": "Hungary",
"Portugal": "Portugal",
"Romania": "Romania",
"Turkey": "Turkey",
"Israel": "Israel",
"Saudi Arabia": "Saudi Arabia",
"United Arab Emirates": "United Arab Emirates",
"South Africa": "South Africa",
"Argentina": "Argentina",
"Chile": "Chile",
"Colombia": "Colombia",
"Peru": "Peru",
"Venezuela": "Venezuela",
"New Zealand": "New Zealand",
"Bangladesh": "Bangladesh",
"Pakistan": "Pakistan",
"Egypt": "Egypt",
"Morocco": "Morocco",
"Nigeria": "Nigeria",
"Kenya": "Kenya",
"Ukraine": "Ukraine",
"Croatia": "Croatia",
"Slovakia": "Slovakia",
"Bulgaria": "Bulgaria",
"Serbia": "Serbia",
"Estonia": "Estonia",
"Latvia": "Latvia",
"Lithuania": "Lithuania",
"Slovenia": "Slovenia",
"Luxembourg": "Luxembourg",
"Malta": "Malta",
"Cyprus": "Cyprus",
"Iceland": "Iceland"
}
# Dropdown choices for the UI: the keys of COUNTRY_LOCATIONS in insertion order.
MAJOR_COUNTRIES = list(COUNTRY_LOCATIONS.keys())
def translate_query(query, country):
    """Translate an English search query into *country*'s language.

    Uses the module-level HF inference client. Best-effort: if the query is
    empty, the country has no entry in COUNTRY_LANGUAGES, or the model call
    fails for any reason, the original query is returned unchanged.

    Args:
        query: English search phrase.
        country: Country name (a key of COUNTRY_LANGUAGES / COUNTRY_LOCATIONS).

    Returns:
        The translated query string, or the original query on fallback.
    """
    try:
        # Empty queries and English-search countries need no translation.
        if not query or country not in COUNTRY_LANGUAGES:
            return query
        target_lang = COUNTRY_LANGUAGES[country]
        prompt = (
            f"Translate the following English text to {target_lang} language. "
            f"Only output the translated text without any explanations or quotes: {query}"
        )
        translated = hf_client.text_generation(
            prompt,
            max_new_tokens=100,
            temperature=0.3
        )
        # Models occasionally wrap the answer in quotes despite the prompt;
        # strip them so the raw phrase is what gets searched.
        return translated.strip().strip('"').strip("'")
    except Exception as e:
        print(f"Translation error: {str(e)}")
        return query
def search_serphouse(query, country, page=1, num_result=10):
    """Run a Google News search for *query* through the SERPHouse live API.

    The query is translated into the target country's language first, and the
    translated text is what is actually searched.

    Args:
        query: English search phrase.
        country: Country name used for location, language and translation.
        page: Result page number (1-based).
        num_result: Number of results per page.

    Returns:
        {"results": <raw API json>, "translated_query": ...} on success, or
        {"error": <message>, "translated_query": ...} on request failure.
    """
    url = "https://api.serphouse.com/serp/live"
    # Translate the search query for the target country.
    translated_query = translate_query(query, country)
    print(f"Original query: {query}")
    print(f"Translated query: {translated_query}")
    payload = {
        "data": {
            # BUGFIX: search with the translated query (the original code
            # translated it and then sent the untranslated English query).
            "q": translated_query,
            "domain": "google.com",
            "loc": COUNTRY_LOCATIONS.get(country, "United States"),
            # BUGFIX: request results in the target language (was hard-coded "en").
            "lang": COUNTRY_LANGUAGES.get(country, "en"),
            "device": "desktop",
            "serp_type": "news",
            # BUGFIX: honor the page/num_result parameters (were hard-coded "1"/"10").
            "page": str(page),
            "num": str(num_result)
        }
    }
    headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": f"Bearer {API_KEY}"
    }
    try:
        # Timeout so a stalled API call cannot hang the UI indefinitely.
        response = requests.post(url, json=payload, headers=headers, timeout=30)
        print("Request payload:", json.dumps(payload, indent=2))
        print("Response status:", response.status_code)
        response.raise_for_status()
        return {"results": response.json(), "translated_query": translated_query}
    except requests.RequestException as e:
        return {"error": f"Error: {str(e)}", "translated_query": query}
def format_results_from_raw(response_data):
    """Convert a raw search_serphouse() result into (error_message, articles).

    Returns a pair: an error string ("" when everything went fine) and a list
    of article dicts with index/title/link/snippet/channel/time/image_url and
    the translated query attached to each entry.
    """
    if "error" in response_data:
        return "Error: " + response_data["error"], []
    try:
        raw = response_data["results"]
        translated = response_data["translated_query"]
        # The news hits live two levels down in the SERPHouse payload.
        news_items = raw.get('results', {}).get('results', {}).get('news', [])
        if not news_items:
            return "๊ฒ€์ƒ‰ ๊ฒฐ๊ณผ๊ฐ€ ์—†์Šต๋‹ˆ๋‹ค.", []
        articles = [
            {
                "index": pos,
                "title": item.get("title", "์ œ๋ชฉ ์—†์Œ"),
                "link": item.get("url", item.get("link", "#")),
                "snippet": item.get("snippet", "๋‚ด์šฉ ์—†์Œ"),
                "channel": item.get("channel", item.get("source", "์•Œ ์ˆ˜ ์—†์Œ")),
                "time": item.get("time", item.get("date", "์•Œ ์ˆ˜ ์—†๋Š” ์‹œ๊ฐ„")),
                "image_url": item.get("img", item.get("thumbnail", "")),
                "translated_query": translated,
            }
            for pos, item in enumerate(news_items, 1)
        ]
        return "", articles
    except Exception as e:
        return f"๊ฒฐ๊ณผ ์ฒ˜๋ฆฌ ์ค‘ ์˜ค๋ฅ˜ ๋ฐœ์ƒ: {str(e)}", []
def serphouse_search(query, country):
    """Search news for *query* in *country*; return (error_message, articles)."""
    return format_results_from_raw(search_serphouse(query, country))
# Hide Gradio's default footer in the rendered page.
css = """
footer {visibility: hidden;}
"""
# Gradio UI: a search box + country dropdown, a hidden translation banner,
# a hidden status line for errors, and 100 pre-built (hidden) article slots
# that are toggled/filled on each search.
with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css, title="NewsAI ์„œ๋น„์Šค") as iface:
    gr.Markdown("๊ฒ€์ƒ‰์–ด๋ฅผ ์ž…๋ ฅํ•˜๊ณ  ์›ํ•˜๋Š” ๊ตญ๊ฐ€๋ฅผ ์„ ํƒํ•˜๋ฉด, ๊ฒ€์ƒ‰์–ด์™€ ์ผ์น˜ํ•˜๋Š” 24์‹œ๊ฐ„ ์ด๋‚ด ๋‰ด์Šค๋ฅผ ์ตœ๋Œ€ 100๊ฐœ ์ถœ๋ ฅํ•ฉ๋‹ˆ๋‹ค.")

    with gr.Column():
        with gr.Row():
            query = gr.Textbox(label="๊ฒ€์ƒ‰์–ด")
            country = gr.Dropdown(MAJOR_COUNTRIES, label="๊ตญ๊ฐ€", value="South Korea")

        # Shows the original vs. translated query after each search.
        translated_query_display = gr.Markdown(visible=False)
        search_button = gr.Button("๊ฒ€์ƒ‰", variant="primary")
        progress = gr.Progress()
        status_message = gr.Markdown(visible=False)
        articles_state = gr.State([])

        # Pre-create 100 hidden article slots; Gradio components must exist
        # statically, so the handler toggles visibility instead of creating them.
        article_components = []
        for i in range(100):
            with gr.Group(visible=False) as article_group:
                title = gr.Markdown()
                image = gr.Image(width=200, height=150)
                snippet = gr.Markdown()
                info = gr.Markdown()
            article_components.append({
                'group': article_group,
                'title': title,
                'image': image,
                'snippet': snippet,
                'info': info,
                'index': i,
            })

    def search_and_display(query, country, articles_state, progress=gr.Progress()):
        """Run the search and build one gr.update per output component.

        Output order must match `search_outputs` below:
        [translation banner, status line, 5 updates per article slot, state].
        """
        progress(0, desc="๊ฒ€์ƒ‰์–ด ๋ฒˆ์—ญ ์ค‘...")
        # NOTE(review): search_serphouse also translates internally; this call
        # is only needed for the on-screen banner.
        translated_query = translate_query(query, country)
        if translated_query != query:
            translated_display = f"**์›๋ณธ ๊ฒ€์ƒ‰์–ด:** {query}\n**๋ฒˆ์—ญ๋œ ๊ฒ€์ƒ‰์–ด:** {translated_query}"
        else:
            translated_display = f"**๊ฒ€์ƒ‰์–ด:** {query}"

        progress(0.2, desc="๊ฒ€์ƒ‰ ์‹œ์ž‘...")
        error_message, articles = serphouse_search(query, country)
        progress(0.5, desc="๊ฒฐ๊ณผ ์ฒ˜๋ฆฌ ์ค‘...")

        outputs = [gr.update(value=translated_display, visible=True)]
        if error_message:
            # BUGFIX: route the error to the rendered status_message component
            # (it previously went to a detached, never-rendered Markdown).
            outputs.append(gr.update(value=error_message, visible=True))
            for _ in article_components:
                outputs.extend([
                    gr.update(visible=False), gr.update(), gr.update(),
                    gr.update(), gr.update()
                ])
            articles_state = []
        else:
            outputs.append(gr.update(value="", visible=False))
            total_articles = len(articles)
            for idx, _ in enumerate(article_components):
                if idx < total_articles:
                    # BUGFIX: clamp the fraction — idx iterates all 100 slots,
                    # so (idx + 1) / total_articles could exceed 1.0 before.
                    progress(min(1.0, (idx + 1) / total_articles),
                             desc=f"๊ฒฐ๊ณผ ํ‘œ์‹œ ์ค‘... {idx + 1}/{total_articles}")
                    article = articles[idx]
                    image_url = article['image_url']
                    # Skip inline data: URIs; only real image URLs are shown.
                    image_update = (
                        gr.update(value=image_url, visible=True)
                        if image_url and not image_url.startswith('data:image')
                        else gr.update(value=None, visible=False)
                    )
                    outputs.extend([
                        gr.update(visible=True),
                        gr.update(value=f"### [{article['title']}]({article['link']})"),
                        image_update,
                        gr.update(value=f"**์š”์•ฝ:** {article['snippet']}"),
                        gr.update(value=f"**์ถœ์ฒ˜:** {article['channel']} | **์‹œ๊ฐ„:** {article['time']}")
                    ])
                else:
                    outputs.extend([
                        gr.update(visible=False), gr.update(), gr.update(),
                        gr.update(), gr.update()
                    ])
            articles_state = articles

        progress(1.0, desc="์™„๋ฃŒ!")
        outputs.append(articles_state)
        return outputs

    # BUGFIX: second output is the real status_message; the original passed a
    # brand-new gr.Markdown(visible=False) here, so error messages were
    # written to a component that never appeared in the layout.
    search_outputs = [translated_query_display, status_message]
    for comp in article_components:
        search_outputs.extend([comp['group'], comp['title'], comp['image'],
                               comp['snippet'], comp['info']])
    search_outputs.append(articles_state)

    search_button.click(
        search_and_display,
        inputs=[query, country, articles_state],
        outputs=search_outputs,
        show_progress=True
    )

iface.launch()