from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, Boolean, Text
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from datetime import datetime
import requests
from bs4 import BeautifulSoup
from rss_reader.logger import logger

# Declarative base shared by every ORM model defined in this module.
Base = declarative_base()


class User(Base):
    """Application account that owns a collection of feed subscriptions."""
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    username = Column(String(50), unique=True, nullable=False)
    # NOTE(review): stored as a plain 100-char string. Nothing here enforces
    # hashing — confirm callers never persist a plaintext password.
    password = Column(String(100), nullable=False)
    # NOTE(review): datetime.now is naive local time; consider a UTC-aware
    # default if the app ever runs across timezones — confirm with callers.
    created_at = Column(DateTime, default=datetime.now)
    # One-to-many: all feeds subscribed to by this user (see Feed.user).
    feeds = relationship("Feed", back_populates="user")




class Feed(Base):
    """An RSS/Atom feed subscription belonging to a single user."""
    __tablename__ = 'feeds'
    id = Column(Integer, primary_key=True)
    title = Column(String(100), nullable=False)
    url = Column(String(500), nullable=False, unique=True)
    icon_url = Column(String(500))  # feed icon URL (may be empty)
    category = Column(String(50), default='未分类')  # default label: "uncategorized"
    last_updated = Column(DateTime)  # last successful refresh time
    user_id = Column(Integer, ForeignKey('users.id'))
    user = relationship("User", back_populates="feeds")
    articles = relationship("Article", back_populates="feed")

    @property
    def unread_count(self):
        """Number of articles in this feed not yet marked as read.

        NOTE: iterates the (lazily loaded) ``articles`` collection in
        Python; fine for small feeds, but a COUNT query would be cheaper
        for very large ones.
        """
        # sum() over a generator avoids materializing the intermediate
        # list the original built solely to take its len().
        return sum(1 for article in self.articles if not article.read)

class Article(Base):
    """A single article (feed entry) belonging to a Feed."""
    __tablename__ = 'articles'
    id = Column(Integer, primary_key=True)
    title = Column(String(200), nullable=False)
    url = Column(String(500), nullable=False, unique=True)
    summary = Column(Text)    # short excerpt shipped in the feed entry
    content = Column(Text)    # full article HTML, fetched lazily on demand
    published = Column(DateTime)
    read = Column(Boolean, default=False)
    starred = Column(Boolean, default=False)
    feed_id = Column(Integer, ForeignKey('feeds.id'))
    feed = relationship("Feed", back_populates="articles")

    def fetch_full_content(self):
        """Fetch and cache the article's full content.

        Downloads ``self.url`` and extracts the first ``<article>``,
        ``<main>`` or ``<body>`` element. On any failure (network error,
        HTTP error status, unparsable page) it falls back to the feed
        summary. The result is cached in ``self.content``; committing the
        change is the caller's responsibility.

        Returns:
            The cached or freshly fetched content (may be None if both
            fetch and summary are empty).
        """
        if not self.content:
            try:
                response = requests.get(self.url, timeout=10)
                # Fix: treat HTTP error statuses (404/500/...) as failures
                # instead of parsing and caching an error page as content.
                response.raise_for_status()
                soup = BeautifulSoup(response.text, 'html.parser')
                node = soup.find('article') or soup.find('main') or soup.body
                # Fix: guard against documents with no <body> at all —
                # the original would cache the literal string "None".
                self.content = str(node) if node is not None else self.summary
            except Exception as e:
                logger.error(f"获取全文失败: {str(e)}, url:{self.url}")
                self.content = self.summary
        return self.content