#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2024/1/31 10:42
# @Author  : 王凯
# @File    : mysql_pipeline.py
# @Project : scrapy_spider
import threading
import time
from queue import Empty, Queue
from typing import List, Dict, Tuple

import project_setting as setting
from components.custom_item import CustomItem, UpdateCustomItem
from utils import tools
from utils.db.mysqldb import MysqlDB
from utils.tools import log


class ItemBuffer:
    """Buffers scraped items in a queue and writes them to storage in batches.

    Entries put into the buffer may be regular items (``CustomItem``),
    update items (``UpdateCustomItem``), callables (post-flush callbacks)
    or anything else (treated as requests). ``flush()`` drains the queue,
    groups items by target table and hands each batch to ``save_items`` /
    ``update_items``, which subclasses override with real persistence.
    """

    def __init__(self):
        # Bounded queue of pending entries; size comes from project settings.
        self._items_queue = Queue(maxsize=setting.ITEM_MAX_CACHED_COUNT)

        self._item_tables = {
            # 'item_name': 'table_name'  # cache of item name -> table name
        }

        self._item_update_keys = {
            # 'table_name': ['id', 'name'...]  # cache of table -> __update_key__
        }

        # True while a batch is being written to the DB (set by __add_item_to_db).
        self._is_adding_to_db = False

    def flush(self):
        """Drain the queue, classify entries and write them in batches.

        May be called concurrently from the spider thread (``process_item``)
        and the background flush thread, so the queue is drained with
        ``get_nowait`` guarded against ``queue.Empty`` rather than trusting
        an ``empty()`` pre-check (which is racy with two consumers).
        """
        try:
            items = []
            update_items = []
            requests = []
            callbacks = []
            items_fingerprints = []
            data_count = 0

            while True:
                try:
                    data = self._items_queue.get_nowait()
                except Empty:
                    # Another flush() may have consumed the last entry
                    # between iterations — just stop draining.
                    break
                data_count += 1

                # Classify the entry.
                if callable(data):
                    callbacks.append(data)

                elif isinstance(data, UpdateCustomItem):
                    update_items.append(data)

                elif isinstance(data, CustomItem):
                    items.append(data)
                    if setting.ITEM_FILTER_ENABLE:
                        items_fingerprints.append(data.fingerprint)

                else:  # request-redis
                    requests.append(data)

                # Write out a full batch and start accumulating a new one.
                if data_count >= setting.ITEM_UPLOAD_BATCH_MAX_SIZE:
                    self.__add_item_to_db(
                        items, update_items, requests, callbacks, items_fingerprints
                    )

                    items = []
                    update_items = []
                    requests = []
                    callbacks = []
                    items_fingerprints = []
                    data_count = 0

            # Write whatever remains of the final, partial batch.
            if data_count:
                self.__add_item_to_db(
                    items, update_items, requests, callbacks, items_fingerprints
                )

        except Exception as e:
            log.exception(e)

    def __pick_items(self, items, is_update_item=False):
        """Split items by target table; the input list is emptied afterwards.

        The item-name -> table-name mapping is cached in ``self._item_tables``
        so the (underscore-style) name is only computed once per item class.

        @param items: list of items to split (emptied in place)
        @param is_update_item: when True, also cache each table's update keys
        @return: dict of table_name -> [item_dict, ...]
        """
        datas_dict = {
            # 'table_name': [{}, {}]
        }

        for item in items:
            item_name = item.item_name
            table_name = self._item_tables.get(item_name)
            if not table_name:
                table_name = item.table_name
                self._item_tables[item_name] = table_name

            datas_dict.setdefault(table_name, []).append(item.to_dict)

            if is_update_item and table_name not in self._item_update_keys:
                self._item_update_keys[table_name] = item.update_key

        # Preserve the original contract: the caller's list ends up empty.
        # (for + clear is O(n); the old pop(0) loop was accidentally O(n^2).)
        items.clear()

        return datas_dict

    def __export_to_db(self, table, datas, is_update=False, update_keys=()):
        """Write one table's batch; returns True on success, False on failure."""
        if is_update:
            if not self.update_items(table, datas, update_keys=update_keys):
                log.error(
                    f"{self.__class__.__name__} 更新数据失败. table: {table}  items: {datas}"
                )
                return False

        else:
            if not self.save_items(table, datas):
                log.error(
                    f"{self.__class__.__name__} 保存数据失败. table: {table}  items: {datas}"
                )
                return False

        return True

    def save_items(self, table, items: List[Dict]) -> bool:
        """
        Save a batch of data. Override in subclasses with real persistence.
        Args:
            table: table name
            items: data, [{}, {}, ...]

        Returns: True / False — whether the save succeeded.
                 On False the batch is NOT added to the dedup store,
                 so it can be re-imported later.
        """

        return True

    def update_items(self, table, items: List[Dict], update_keys: Tuple = ()) -> bool:
        """
        Update a batch of data; used together with UpdateItem. If the spider
        does not use UpdateItem this hook need not be implemented.
        Args:
            table: table name
            items: data, [{}, {}, ...]
            update_keys: columns to update, e.g. ("title", "publish_time")

        Returns: True / False — whether the update succeeded.
                 On False the batch is NOT added to the dedup store,
                 so it can be re-imported later.
        """
        # NOTE: the original default was the *typing class* ``Tuple`` (a typo
        # for an annotation); an empty tuple is the intended no-keys default.

        return True

    def __add_item_to_db(
            self, items, update_items, requests, callbacks, items_fingerprints
    ):
        """Group the collected items per table and export each group.

        ``requests``, ``callbacks`` and ``items_fingerprints`` are collected
        by flush() but not consumed here yet — kept for interface parity.
        """
        self._is_adding_to_db = True

        # Sort items into per-table buckets.
        items_dict = self.__pick_items(items)
        update_items_dict = self.__pick_items(update_items, is_update_item=True)

        # Batch-insert items.
        failed_items = {"add": [], "update": [], "requests": []}
        while items_dict:
            table, datas = items_dict.popitem()

            log.debug(
                """
                -------------- item 批量入库 --------------
                表名: %s
                datas: %s
                    """
                % (table, tools.dumps_json(datas, indent=16))
            )

            if not self.__export_to_db(table, datas):
                failed_items["add"].append({"table": table, "datas": datas})

        # Batch-update items.
        while update_items_dict:
            table, datas = update_items_dict.popitem()

            log.debug(
                """
                -------------- item 批量更新 --------------
                表名: %s
                datas: %s
                    """
                % (table, tools.dumps_json(datas, indent=16))
            )

            update_keys = self._item_update_keys.get(table)
            if not self.__export_to_db(
                    table, datas, is_update=True, update_keys=update_keys
            ):
                failed_items["update"].append(
                    {"table": table, "datas": datas, "update_keys": update_keys}
                )
        # todo should failures be recorded / retried?
        self._is_adding_to_db = False

    def put_item(self, item):
        """Queue one entry; run the pre-DB hook for CustomItem subclasses."""
        if isinstance(item, CustomItem):
            # Callback before the item goes to the DB.
            item.pre_to_db()

        self._items_queue.put(item)


class MysqlPipeline(ItemBuffer):
    """ItemBuffer implementation that persists item batches to MySQL.

    A background thread started in ``open_spider`` flushes the buffer
    periodically; ``process_item`` also flushes when the upload interval
    has elapsed, and ``close_spider`` performs a final flush.
    """

    def __init__(self):
        super().__init__()
        self._to_db = None       # lazily-created MysqlDB handle (see to_db)
        self._db_thread = None   # background flush thread (see open_spider)
        self.next_time = int(time.time())  # epoch seconds of the last flush

    @property
    def to_db(self):
        """Create the MysqlDB connection on first use and cache it."""
        if not self._to_db:
            self._to_db = MysqlDB()

        return self._to_db

    def save_items(self, table, items: List[Dict]) -> bool:
        """
        Save a batch of data.
        Args:
            table: table name
            items: data, [{}, {}, ...]

        Returns: True / False — whether the save succeeded.
                 On False the batch is NOT added to the dedup store,
                 so it can be re-imported later.
        """

        sql, datas = tools.make_batch_sql(table, items)
        add_count = self.to_db.add_batch(sql, datas)
        datas_size = len(datas)
        if add_count:
            # add_count counts newly-inserted rows; the rest were duplicates.
            tools.log.info("共导出 %s 条数据 到 %s, 重复 %s 条" % (datas_size, table, datas_size - add_count))

        # None signals a DB error; 0 (all duplicates) still counts as success.
        return add_count is not None

    def update_items(self, table, items: List[Dict], update_keys: Tuple = ()) -> bool:
        """
        Update a batch of data.
        Args:
            table: table name
            items: data, [{}, {}, ...]
            update_keys: columns to update, e.g. ("title", "publish_time");
                when falsy, every column of the first item is updated.

        Returns: True / False — whether the update succeeded.
                 On False the batch is NOT added to the dedup store,
                 so it can be re-imported later.
        """
        # Bug fix: the original default was the typing class ``Tuple``, which
        # is truthy and would have been passed as the update-column list.

        sql, datas = tools.make_batch_sql(table, items, update_columns=update_keys or list(items[0].keys()))
        update_count = self.to_db.add_batch(sql, datas)
        if update_count:
            # MySQL reports 2 affected rows per updated row with
            # ON DUPLICATE KEY UPDATE, hence the // 2.
            msg = "共更新 %s/%s 条数据 到 %s" % (update_count // 2, len(items), table)
            if update_keys:
                msg += " 更新字段为 {}".format(update_keys)
            tools.log.info(msg)

        return update_count is not None

    def process_item(self, item: CustomItem, spider):
        """Queue the item; flush if the upload interval has elapsed."""
        self.put_item(item)
        if int(time.time()) - self.next_time > setting.ITEM_UPLOAD_INTERVAL:
            self.flush()
            # Keep next_time an int, consistent with __init__.
            self.next_time = int(time.time())
        return item

    def close_spider(self, spider):
        """Final flush so no buffered items are lost on shutdown."""
        self.flush()

    def flush_back(self, spider):
        """Background loop: flush every 0.5s until the spider goes idle."""
        while True:
            self.flush()
            if spider.crawler.engine.slot is None or spider.crawler.engine.spider_is_idle():
                break
            time.sleep(0.5)

    def open_spider(self, spider):
        """Start the background flush thread.

        Bug fix: ``Thread.start()`` returns None, so the original one-liner
        always stored None in ``self._db_thread``; keep the Thread object
        so it can be joined / inspected later.
        """
        self._db_thread = threading.Thread(target=self.flush_back, args=(spider,))
        self._db_thread.start()
