# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
from wisdom.crawler.items import NewsItem
from scrapy.exceptions import DropItem
from typing import Dict, Any, TYPE_CHECKING
from wisdom.config import Config
from wisdom.models.base import create_tables, SessionLocal
from wisdom.models.news import News
import pendulum

if TYPE_CHECKING:
    from sqlalchemy.orm import sessionmaker
class MergeNewsPipeline:
    """Scrapy item pipeline that de-duplicates NewsItem objects by ``code``.

    Items sharing a code are merged in memory while the spider runs; when
    the spider closes, every merged item is upserted (matched by ``url``)
    into the ``News`` table.
    """

    def __init__(self) -> None:
        # code -> merged NewsItem accumulated over the whole crawl.
        self.item_dict: Dict[str, "NewsItem"] = {}

    @classmethod
    def from_crawler(cls, crawler):
        """Scrapy factory hook; make sure the DB tables exist up front."""
        create_tables()
        return cls()

    def process_item(self, item, spider):
        """Collect the item, merging it into any previous item with the same code.

        Raises:
            DropItem: if the item is not a NewsItem or has an empty code.
        """
        if not isinstance(item, NewsItem):
            raise DropItem("Invalid item type")
        adapter = ItemAdapter(item)
        code = adapter.get("code")
        # `not code` also rejects a missing/None code, which the previous
        # `len(code) == 0` check crashed on with a TypeError.
        if not code:
            raise DropItem("Invalid item code")

        if isinstance(code, str) and code in self.item_dict:
            # Seen before: fold the new data into the stored item.
            self.merge_item(self.item_dict[code], item)
        else:
            self.item_dict[code] = item
        return item

    def merge_item(self, existing_item: "NewsItem", new_item: "NewsItem") -> None:
        """Merge ``new_item`` into ``existing_item`` in place.

        ``fin_tags`` lists are unioned; every other field is copied only
        when the existing item lacks a truthy value for it.
        """
        for field in new_item.fields:
            if field == "fin_tags":
                # `.get(...) or []` guards against a missing key, which the
                # previous direct indexing raised KeyError on.
                merged = set(existing_item.get("fin_tags") or [])
                merged.update(new_item.get("fin_tags") or [])
                existing_item["fin_tags"] = list(merged)
            elif new_item.get(field) and not existing_item.get(field):
                existing_item[field] = new_item[field]

    def close_spider(self, spider):
        """Persist merged items: update rows matched by ``url``, insert the rest."""
        print(f"============ count {len(self.item_dict)} items ===========")
        session = SessionLocal()
        try:
            for item in self.item_dict.values():
                listed_company_value = 1 if item.get('listed_company') else 0
                published_date = pendulum.parse(str(item['published_date']), tz='Asia/Shanghai')
                record: News = session.query(News).filter_by(url=item['url']).first()
                if record:
                    self._update_record(record, item, published_date, listed_company_value)
                else:
                    session.add(self._new_record(item, published_date, listed_company_value))
                session.commit()
        except Exception as e:
            # Discard the failed transaction so the session is left clean
            # instead of being closed mid-transaction.
            session.rollback()
            print(f"Error: {e}")
        finally:
            session.close()

    @staticmethod
    def _update_record(record, item, published_date, listed_company_value):
        """Sync an existing News row's fields with the crawled item."""
        for attr in ('title', 'url', 'content', 'source', 'author',
                     'category', 'thumbnail'):
            if getattr(record, attr) != item[attr]:
                setattr(record, attr, item[attr])
        if record.published_date != published_date:
            record.published_date = published_date
        # Build each merged list once: set ordering is nondeterministic, so
        # constructing it twice (compare, then assign) could disagree.
        merged_stocks = list(set(record.stock_list + item['stock_list']))
        if record.stock_list != merged_stocks:
            record.stock_list = merged_stocks
        # Only ever promote 0 -> 1; never demote a known listed company.
        if record.listed_company == 0 and listed_company_value == 1:
            record.listed_company = 1
        merged_tags = list(set(record.fin_tags + item['fin_tags']))
        if record.fin_tags != merged_tags:
            record.fin_tags = merged_tags

    @staticmethod
    def _new_record(item, published_date, listed_company_value):
        """Build a fresh News row from the crawled item.

        Fix: ``author`` and ``thumbnail`` are now set on insert too — the
        update branch already synced them, but the original insert silently
        dropped them.
        """
        return News(
            title=item['title'],
            url=item['url'],
            content=item['content'],
            source=item['source'],
            author=item['author'],
            thumbnail=item['thumbnail'],
            published_date=published_date,
            category=item['category'],
            stock_list=item['stock_list'],
            listed_company=listed_company_value,
            fin_tags=item['fin_tags']
        )