import sys
import os

# Determine the project root directory (assumes this file sits three levels below it)
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", ".."))
sys.path.append(project_root)

from service.database_service.mongodb.connect.mongodb_client import global_mongo_client
from service.database_service.redis.connect.redis_client import global_redis_client


class ListServicePipeline:
    """Scrapy pipeline that persists newly-seen news-list entries to MongoDB.

    Items are expected to carry a ``data_list`` key holding dicts with at
    least ``news_id`` and ``href`` fields. Entries whose ``href`` is None
    are dropped; entries with an unseen ``news_id`` are inserted into the
    ``scrapy_crawl_news.news_list`` collection. Returns the filtered list,
    which the downstream pipeline consumes directly.
    """

    def __init__(self):
        # MongoDB collection handle; acquired in open_spider.
        self.collection = None

    def open_spider(self, spider):
        """Acquire the MongoDB collection when the spider starts."""
        self.collection = global_mongo_client.connect('scrapy_crawl_news', 'news_list')

    def process_item(self, item, spider):
        """Insert unseen entries and return only those with a usable link.

        Single pass over ``data_list`` (the previous version iterated twice
        and filtered with an O(n^2) dict-membership test): entries with a
        None ``href`` are skipped, the rest are kept and inserted when their
        ``news_id`` is not already present in the collection.
        """
        kept = []
        for data in item['data_list']:
            # 1. Drop entries without a link address.
            if data['href'] is None:
                continue
            kept.append(data)
            # 2. Deduplicate by news_id: only insert news not seen before.
            if self.collection.count_documents({"news_id": data['news_id']}) == 0:
                print(f"写入新新闻: {data}")
                result = self.collection.insert_one(data)
                if result is not None:
                    print(f"写入新闻成功")

        return kept

    def close_spider(self, spider):
        """Release the MongoDB connection when the spider closes."""
        global_mongo_client.disconnect()


class NewsLinkPipeline:
    """Scrapy pipeline that caches news links in a Redis hash.

    Receives the filtered list produced by the upstream pipeline and stores
    each entry's ``href`` under its ``news_id`` in the ``news_links`` hash.
    """

    def __init__(self):
        # Redis client handle; acquired in open_spider rather than here, so
        # construction has no side effects and acquisition is symmetric with
        # close_spider (consistent with ListServicePipeline).
        self.redis = None

    def open_spider(self, spider):
        """Connect to Redis when the spider starts."""
        self.redis = global_redis_client.connect()

    def process_item(self, item, spider):
        """Record news_id -> href mappings and pass the item through.

        Returns None when the upstream pipeline produced no item.
        """
        if item is not None:
            for data in item:
                self.redis.hset("news_links", data['news_id'], data['href'])
            return item
        return None

    def close_spider(self, spider):
        """Release the Redis connection when the spider closes."""
        global_redis_client.disconnect()
