import logging
from datetime import datetime

import requests
from bs4 import BeautifulSoup

from app.models import db
from app.models.news_model import News
from app.utils.news_parsers import parse_sina_news, parse_xueqiu_news, parse_pbc_news



def fetch_news():
    """Pull the latest items from every enabled news source and persist them.

    Each per-source fetcher handles its own failures and returns an empty
    list on error, so one broken source does not abort the whole run.

    Returns:
        bool: Always True.
    """
    # Sina Finance
    save_news_items(get_sina_news())

    # Xueqiu source is currently disabled.
    # save_news_items(get_xueqiu_news())

    # People's Bank of China
    save_news_items(get_pbc_news())

    return True

def get_sina_news():
    """Fetch the latest Sina Finance roll news.

    Returns:
        list: Parsed news item dicts from ``parse_sina_news``, or an
        empty list when the request or parsing fails.
    """
    url = 'https://finance.sina.com.cn/roll/index.d.html?cid=56589'
    try:
        response = requests.get(url, timeout=10)
        # requests does NOT raise on HTTP errors by default; without this
        # a 404/500 error page would be handed to the parser as content.
        response.raise_for_status()
        return parse_sina_news(response.text)
    except Exception as e:
        # Best-effort source: log and return an empty list so callers
        # (fetch_news) can continue with the other sources.
        logging.getLogger(__name__).warning("获取新浪新闻失败: %s", e)
        return []

def get_xueqiu_news():
    """Fetch the current Xueqiu hot-post list.

    Returns:
        list: Parsed news item dicts from ``parse_xueqiu_news``, or an
        empty list when the request or parsing fails.
    """
    url = 'https://xueqiu.com/statuses/hot/list.json'
    try:
        # Xueqiu rejects requests without a browser-like User-Agent.
        response = requests.get(url, timeout=10, headers={
            'User-Agent': 'Mozilla/5.0'
        })
        # Fail fast on HTTP errors; otherwise .json() would raise a less
        # informative decode error on an HTML error page.
        response.raise_for_status()
        return parse_xueqiu_news(response.json())
    except Exception as e:
        # Best-effort source: log and return an empty list so callers
        # (fetch_news) can continue with the other sources.
        logging.getLogger(__name__).warning("获取雪球新闻失败: %s", e)
        return []

def get_pbc_news():
    """Fetch the latest news from the PBC (People's Bank of China) website.

    Returns:
        list: Parsed news item dicts from ``parse_pbc_news``, or an
        empty list when the request or parsing fails.
    """
    url = 'http://www.pbc.gov.cn/goutongjiaoliu/113456/113469/index.html'
    try:
        response = requests.get(url, timeout=10)
        # Fail fast on HTTP errors instead of parsing an error page.
        response.raise_for_status()
        # NOTE(review): pbc.gov.cn pages may not declare their charset in
        # headers; if parsed text looks garbled, verify response.encoding
        # against the page's actual encoding.
        return parse_pbc_news(response.text)
    except Exception as e:
        # Best-effort source: log and return an empty list so callers
        # (fetch_news) can continue with the other sources.
        logging.getLogger(__name__).warning("获取央行新闻失败: %s", e)
        return []

def save_news_items(news_items):
    """Persist news items, skipping URLs already stored or repeated in the batch.

    Args:
        news_items: Iterable of dicts with required keys ``title``,
            ``source``, ``url`` and optional ``content``, ``image_url``,
            ``publish_time``.
    """
    # Track URLs added during this batch: the DB query below cannot see
    # rows that were added to the session but not yet committed, so
    # without this, duplicate URLs within one batch would all be inserted.
    seen_urls = set()
    for item in news_items:
        url = item['url']
        if url in seen_urls or News.query.filter_by(url=url).first():
            continue
        seen_urls.add(url)
        news = News(
            title=item['title'],
            content=item.get('content', ''),
            source=item['source'],
            url=url,
            image_url=item.get('image_url', ''),
            # Fall back to the ingestion time when the source gave none.
            publish_time=item.get('publish_time', datetime.utcnow())
        )
        db.session.add(news)
    # Single commit for the whole batch.
    db.session.commit()


class NewsService:
    """Query and persistence helpers for ``News`` records."""

    @staticmethod
    def get_news_list(source=None, page=1, per_page=10):
        """Return a page of news ordered newest-first.

        Args:
            source: Optional source name to filter by.
            page: 1-based page number.
            per_page: Items per page.

        Returns:
            A Flask-SQLAlchemy pagination object.
        """
        base_query = News.query
        if source:
            base_query = base_query.filter_by(source=source)
        ordered = base_query.order_by(News.publish_time.desc())
        return ordered.paginate(page=page, per_page=per_page)

    @staticmethod
    def add_news(news_data):
        """Create and commit one ``News`` row from a dict.

        Args:
            news_data: Dict with required keys ``title``, ``source``,
                ``url`` and optional ``content``, ``image_url``,
                ``publish_time``.

        Returns:
            The newly persisted ``News`` instance.
        """
        record = News(
            title=news_data['title'],
            content=news_data.get('content'),
            source=news_data['source'],
            url=news_data['url'],
            image_url=news_data.get('image_url'),
            publish_time=news_data.get('publish_time'),
        )
        db.session.add(record)
        db.session.commit()
        return record