# scraper.py
import json
from datetime import datetime

from sqlalchemy import Table, MetaData

from spiders.db.dbmysql import get_session, engine, session
from spiders.db.models import CrawlSource, CrawlSingleTask


def get_tasks_with_rules():
    """Return every crawl task with status 2 joined with its source's parsed rules.

    The original implementation referenced ``task_table`` / ``source_table``,
    which were never defined anywhere in this module and raised ``NameError``.
    The join is now expressed against the imported ORM models instead.

    Returns:
        list[dict]: one entry per matching task, each with a ``'task'`` dict
        (id, source_id, source_book_id, book_name, author_name, task_status)
        and a ``'source'`` dict (source_name, crawl_rule parsed from JSON,
        source_status).
    """
    session = get_session()
    try:
        # Join tasks to their source and keep only tasks with status == 2.
        # NOTE(review): assumes CrawlSingleTask/CrawlSource expose the same
        # columns the raw tables did — confirm against spiders.db.models.
        results = (
            session.query(CrawlSingleTask, CrawlSource)
            .join(CrawlSource, CrawlSingleTask.source_id == CrawlSource.id)
            .filter(CrawlSingleTask.task_status == 2)
            .all()
        )

        tasks_with_rules = []
        for task, source in results:
            tasks_with_rules.append({
                'task': {
                    'id': task.id,
                    'source_id': task.source_id,
                    'source_book_id': task.source_book_id,
                    'book_name': task.book_name,
                    'author_name': task.author_name,
                    'task_status': task.task_status
                },
                'source': {
                    'source_name': source.source_name,
                    # crawl_rule is stored as a JSON string; decode it here.
                    'crawl_rule': json.loads(source.crawl_rule),
                    'source_status': source.source_status
                }
            })
        return tasks_with_rules
    finally:
        # The session was opened locally, so close it even on query failure.
        session.close()


def insert_crawl_source(source_name, crawl_rule, source_status, create_time=None, update_time=None):
    """Insert a new CrawlSource row and return its primary key.

    Either timestamp that is omitted (or falsy) defaults to the current time.

    Args:
        source_name: display name of the crawl source.
        crawl_rule: crawl rule payload (stored as-is).
        source_status: status flag for the source.
        create_time: creation timestamp; defaults to ``datetime.now()``.
        update_time: last-update timestamp; defaults to ``datetime.now()``.

    Returns:
        The id of the freshly inserted row.
    """
    record = CrawlSource(
        source_name=source_name,
        crawl_rule=crawl_rule,
        source_status=source_status,
        create_time=create_time or datetime.now(),
        update_time=update_time or datetime.now(),
    )
    session.add(record)
    session.commit()
    return record.id


def query_crawl_sources(source_name=None):
    """Return all CrawlSource rows, optionally filtered by exact name.

    Args:
        source_name: when truthy, restrict results to rows whose
            ``source_name`` matches exactly.

    Returns:
        A list of matching CrawlSource objects.
    """
    q = session.query(CrawlSource)
    if source_name:
        return q.filter(CrawlSource.source_name == source_name).all()
    return q.all()


def update_crawl_source(source_id, **kwargs):
    """Apply attribute updates to the CrawlSource identified by ``source_id``.

    Silently does nothing when no row with that id exists. Each keyword
    argument is set verbatim as an attribute on the ORM object, then the
    change is committed.
    """
    record = session.query(CrawlSource).get(source_id)
    if record is None:
        return
    for attr, value in kwargs.items():
        setattr(record, attr, value)
    session.commit()


def delete_crawl_source(source_id):
    """Delete the CrawlSource with ``source_id``; no-op if it doesn't exist."""
    record = session.query(CrawlSource).get(source_id)
    if record is None:
        return
    session.delete(record)
    session.commit()


def query_crawl_single_tasks(source_name=None, source_book_id=None):
    """Fetch CrawlSingleTask rows, optionally narrowed by name and/or book id.

    Args:
        source_name: when truthy, match ``source_name`` exactly.
        source_book_id: when truthy, match ``source_book_id`` exactly.

    Returns:
        A list of matching CrawlSingleTask objects.
    """
    conditions = []
    if source_name:
        conditions.append(CrawlSingleTask.source_name == source_name)
    if source_book_id:
        conditions.append(CrawlSingleTask.source_book_id == source_book_id)

    q = session.query(CrawlSingleTask)
    for cond in conditions:
        q = q.filter(cond)
    return q.all()


def update_crawl_single_task(task_id, **kwargs):
    """Apply attribute updates to the CrawlSingleTask with ``task_id``.

    Silently does nothing when no row with that id exists; otherwise sets
    each keyword argument as an attribute and commits.
    """
    record = session.query(CrawlSingleTask).get(task_id)
    if record is None:
        return
    for attr, value in kwargs.items():
        setattr(record, attr, value)
    session.commit()


def delete_crawl_single_task(task_id):
    """Delete the CrawlSingleTask with ``task_id``; no-op if it doesn't exist."""
    record = session.query(CrawlSingleTask).get(task_id)
    if record is None:
        return
    session.delete(record)
    session.commit()


