import sys
import os

# Resolve the project root (assumes this file lives four directory levels below it)
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", "..", ".."))
sys.path.append(project_root)

from core.config.mysql_config import MySQLConfig
from service.database_service.mongodb.connect.mongodb_client import global_mongo_client
from service.database_service.mysql.operation.publisher_crud import PublisherCRUD


class DetailServicePipeline:
    """Scrapy pipeline for news-detail items.

    Complete records are written to the ``news_detail`` MongoDB collection;
    records with any missing field are dropped and their matching entry is
    removed from the ``news_list`` collection.
    """

    # Every one of these fields must be non-None for a record to be persisted.
    REQUIRED_FIELDS = (
        'news_title',
        'news_time',
        'publisher_name',
        'publisher_url',
        'publisher_location',
        'publisher_total_article',
        'publisher_total_fans',
    )

    def __init__(self):
        # Collections are bound lazily in open_spider().
        self.collection_list = None
        self.collection_detail = None

    def open_spider(self, spider):
        """Acquire the MongoDB collections when the spider starts."""
        self.collection_list = global_mongo_client.connect('scrapy_crawl_news', 'news_list')
        self.collection_detail = global_mongo_client.connect('scrapy_crawl_news', 'news_detail')

    def process_item(self, item, spider):
        """Persist a complete detail record, or discard an incomplete one.

        :param item: item carrying a ``data`` dict with news/publisher fields
        :param spider: the spider that produced the item (unused)
        :return: the item when stored; ``None`` when the record was discarded
        """
        data = item['data']

        # 1. Filter: any required field equal to None disqualifies the record.
        if all(data[field] is not None for field in self.REQUIRED_FIELDS):
            # 2. Normalize the record to the detail-collection schema.
            format_data = {
                'news_id': data['news_id'],
                'news_title': data['news_title'],
                'news_time': data['news_time'],
                'publisher_name': data['publisher_name'],
                'publisher_url': data['publisher_url'],
                'publisher_location': data['publisher_location'],
                'publisher_total_article': data['publisher_total_article'],
                'publisher_total_fans': data['publisher_total_fans'],
                'news_detail': data['news_detail'],
            }

            # 3. Write the record.
            result = self.collection_detail.insert_one(format_data)
            if result is not None:
                print("写入一条详情成功")
            return item
        else:
            # Incomplete record: delete the matching list entry and drop the item.
            print("数据不符合要求，删除列表数据")
            query = {"news_id": data['news_id']}
            result = self.collection_list.delete_one(query)
            if result is not None:
                print("删除列表数据成功")
            return None

    def close_spider(self, spider):
        """Release the shared MongoDB connection when the spider closes."""
        global_mongo_client.disconnect()


class NewsStructureSpiderPipeline:
    """Scrapy pipeline that stores the structured body of each news article
    in the ``news_structure`` MongoDB collection."""

    def __init__(self):
        # Bound in open_spider().
        self.collection = None

    def open_spider(self, spider):
        """Acquire the target MongoDB collection when the spider starts."""
        self.collection = global_mongo_client.connect('scrapy_crawl_news', 'news_structure')

    def process_item(self, item, spider):
        """Write one news-structure record to MongoDB.

        :param item: item carrying a ``data`` dict with ``news_id`` and
            ``news_detail_body``; ``None`` means an upstream pipeline
            already dropped the item
        :param spider: the spider that produced the item (unused)
        :return: the item on success; ``None`` when the item was ``None``
        """
        # 1. Pass through drops performed by an upstream pipeline.
        if item is None:
            return None

        data = item['data']

        # 2. Normalize and write the record.
        format_data = {'news_id': data['news_id'], 'news_detail_body': data['news_detail_body']}
        result = self.collection.insert_one(format_data)
        if result is not None:
            print("写入一条新闻结构成功")
        return item

    def close_spider(self, spider):
        # No per-pipeline teardown; the shared Mongo client is closed elsewhere.
        pass


class NewsPublisherSpiderPipeline:
    """Scrapy pipeline that keeps the MySQL publisher table up to date."""

    def __init__(self):
        # NOTE(review): the method name says "redis" but the object comes from
        # MySQLConfig and is passed to PublisherCRUD — presumably this returns
        # a MySQL session; confirm the naming in core.config.mysql_config.
        self.db = MySQLConfig().get_redis_client()

    def open_spider(self, spider):
        pass

    def process_item(self, item, spider):
        """Insert a new publisher or raise an existing publisher's counters.

        Dropped items (``None``) are passed through unchanged.  For a known
        publisher the fan and article counts are only ever increased, and a
        single update is issued even when both counters change.

        :param item: item carrying a ``data`` dict with publisher fields
        :param spider: the spider that produced the item (unused)
        :return: the item, or ``None`` when the incoming item was ``None``
        """
        # 1. Pass through drops performed by an upstream pipeline.
        if item is None:
            return None

        data = item['data']

        # 2. Look the publisher up by name.
        publisher = PublisherCRUD.get_publisher_by_name(self.db, data['publisher_name'])
        if publisher is None:
            # 3. Unknown publisher: insert it directly.
            PublisherCRUD.insert_one_publisher(self.db, data)
            return item

        # 4. Known publisher: bump any counter that grew, then write once.
        #    Values are compared as floats because the stored counts may be
        #    strings like "1.2万" equivalents already normalized to numerics
        #    upstream — TODO confirm the actual stored type.
        changed = False
        if float(publisher.publisher_total_fans) < float(data['publisher_total_fans']):
            publisher.publisher_total_fans = data['publisher_total_fans']
            changed = True
        if float(publisher.publisher_total_article) < float(data['publisher_total_article']):
            publisher.publisher_total_article = data['publisher_total_article']
            changed = True
        if changed:
            PublisherCRUD.update_publisher(self.db, publisher)
        return item

    def close_spider(self, spider):
        pass