from sqlalchemy.orm import sessionmaker
from spiders.db.dbmysql import get_session
from spiders.db.models import CrawlSingleTask, CrawlSource
import json


class DatabaseService:
    """CRUD access layer for CrawlSingleTask and CrawlSource rows.

    Every public method opens its own short-lived session and always
    closes it — even when the lookup, attribute assignment, or commit
    raises (the original code leaked the session in several of those
    paths).  Commit/delete failures are reported best-effort (printed,
    not re-raised) to preserve the original contract; a missing row on
    update/delete raises ValueError.
    """

    def __init__(self):
        # Bind a session factory to the same engine as the shared
        # project session so all sessions hit the same database.
        self.Session = sessionmaker(bind=get_session().bind)

    def get_session(self):
        """Create and return a fresh session from the factory."""
        return self.Session()

    # ---- generic helpers shared by the task and source APIs ----

    def _query_one(self, model, obj_id):
        """Fetch one row of *model* by primary key; None when absent."""
        session = self.get_session()
        try:
            return session.query(model).get(obj_id)
        finally:
            session.close()

    def _query_all(self, model):
        """Fetch every row of *model*."""
        session = self.get_session()
        try:
            return session.query(model).all()
        finally:
            session.close()

    def _insert(self, model, data):
        """Insert one row built from the *data* dict.

        Failures roll back and are printed, not raised (best-effort,
        matching the original behavior).
        """
        session = self.get_session()
        try:
            # Construct inside the try so a bad column name in *data*
            # cannot leak the session.
            session.add(model(**data))
            session.commit()
        except Exception as e:
            session.rollback()
            print(f"Commit failed: {e}")
        finally:
            session.close()

    def _update(self, model, obj_id, data, not_found_msg):
        """Apply *data* as attribute updates to the row with *obj_id*.

        Raises ValueError(not_found_msg) when the row does not exist.
        """
        session = self.get_session()
        try:
            obj = session.query(model).get(obj_id)
            if obj is None:
                raise ValueError(not_found_msg)
            for key, value in data.items():
                setattr(obj, key, value)
            try:
                session.commit()
            except Exception as e:
                session.rollback()
                print(f"Commit failed: {e}")
        finally:
            # Outer finally guarantees cleanup even if the lookup or a
            # setattr raises — the original closed only after commit.
            session.close()

    def _delete(self, model, obj_id, not_found_msg):
        """Delete the row with *obj_id*; raise ValueError when missing."""
        session = self.get_session()
        try:
            obj = session.query(model).get(obj_id)
            if obj is None:
                raise ValueError(not_found_msg)
            try:
                session.delete(obj)
                session.commit()
            except Exception as e:
                session.rollback()
                print(f"Delete failed: {e}")
        finally:
            session.close()

    # ---- task API ----

    def get_task(self, task_id):
        """Return the CrawlSingleTask with *task_id*, or None."""
        return self._query_one(CrawlSingleTask, task_id)

    def get_all_tasks(self):
        """Return every CrawlSingleTask row."""
        return self._query_all(CrawlSingleTask)

    def add_task(self, task_data):
        """Insert a new CrawlSingleTask from a dict of column values."""
        self._insert(CrawlSingleTask, task_data)

    def update_task(self, task_id, task_data):
        """Update columns of an existing task; ValueError if absent."""
        self._update(CrawlSingleTask, task_id, task_data, "Task not found")

    def delete_task(self, task_id):
        """Delete a task by id; ValueError if absent."""
        self._delete(CrawlSingleTask, task_id, "Task not found")

    # ---- source API ----

    def get_all_sources(self):
        """Return every CrawlSource row."""
        return self._query_all(CrawlSource)

    def get_source(self, source_id):
        """Return the CrawlSource with *source_id*, or None."""
        return self._query_one(CrawlSource, source_id)

    def add_source(self, source_data):
        """Insert a new CrawlSource from a dict of column values."""
        self._insert(CrawlSource, source_data)

    def update_source(self, source_id, source_data):
        """Update columns of an existing source; ValueError if absent."""
        self._update(CrawlSource, source_id, source_data, "Source not found")

    def delete_source(self, source_id):
        """Delete a source by id; ValueError if absent."""
        self._delete(CrawlSource, source_id, "Source not found")


# Scheduled-job helper: report tasks that are ready to run.
def get_tasks_with_rules():
    """Print every task with task_status == 2 together with its source.

    Returns:
        list[tuple]: (task, source) pairs, where source is None when the
        task has no matching CrawlSource row.  The original version
        returned nothing despite its ``get_`` name; returning the pairs
        is backward-compatible (callers ignoring the return still work).
    """
    session = get_session()
    results = []
    try:
        # Tasks with status 2 (per the original comment, presumably
        # "ready to run" — confirm against the task-status enum).
        tasks = session.query(CrawlSingleTask).filter_by(task_status=2).all()

        # Look up the associated source for each task.  This is an N+1
        # query pattern; acceptable while the task count stays small.
        for task in tasks:
            source = session.query(CrawlSource).filter_by(id=task.source_id).one_or_none()
            results.append((task, source))
            if source:
                print(f"Task ID: {task.id}")
                print(f"Task Source: {source.source_name}")
                print(f"Source Status: {source.source_status}")
            else:
                print(f"Task ID: {task.id} has no associated source.")
    except Exception as e:
        # Best-effort reporting: log the failure instead of propagating,
        # matching the original behavior.
        print(f"Error querying tasks: {e}")
    finally:
        session.close()
    return results
