'''
Description: 
Author: YeJianFa
Date: 2021-03-11 17:20:20
LastEditors: Please set LastEditors
LastEditTime: 2021-07-01 14:27:35
'''
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
from scrapy.exporters import CsvItemExporter
import time
import copy
import json

# 单文章热度item处理
class ZdmhotbtwdatePipeline:
    """Pipeline for per-article hot-score items.

    Collects each item into ``spider.data`` and dumps the accumulated
    list as JSON when the spider closes.
    """

    def process_item(self, item, spider):
        """Append a deep copy of *item* to ``spider.data``.

        A deep copy is taken to guard against duplicated or corrupted
        data when the same item object is mutated downstream.

        Returns the original item so that later pipelines in
        ITEM_PIPELINES still receive it (returning None would silently
        drop the item from the rest of the chain).
        """
        data_item = copy.deepcopy(item)  # deep copy: prevent duplicate/garbled data
        spider.data.append(dict(data_item))
        return item  # keep the item flowing through subsequent pipelines

    def close_spider(self, spider):
        """Serialize all collected items to JSON and print the result."""
        # NOTE(review): ensure_ascii defaults to True, so non-ASCII text
        # (e.g. Chinese titles) is printed as \uXXXX escapes — pass
        # ensure_ascii=False if readable output is wanted.
        serialized = json.dumps(spider.data)
        print(serialized)

# 总文章热度item处理
class ActHotAllPipeline:
    """Pipeline for aggregated (per-author) article hot scores.

    Accumulates hot scores and article counts per author into
    ``spider.data_dic`` and dumps the aggregate as JSON on close.
    """

    def process_item(self, item, spider):
        """Accumulate *item*'s hot score under its author key.

        ``spider.data_dic`` maps author -> {'hot': total, 'quantity': count}.
        A deep copy is taken to guard against duplicated/garbled data.

        Returns the original item so that later pipelines in
        ITEM_PIPELINES still receive it.
        """
        data_item = copy.deepcopy(item)  # deep copy: prevent duplicate/garbled data
        key = data_item['author']
        if key in spider.data_dic:
            spider.data_dic[key]['hot'] += data_item['hot']
            spider.data_dic[key]['quantity'] += 1
        else:
            spider.data_dic[key] = {'hot': data_item['hot'], 'quantity': 1}
        return item  # keep the item flowing through subsequent pipelines

    def close_spider(self, spider):
        """Flatten the per-author aggregate into a list and print it as JSON."""
        data = [
            {
                'author': author,
                'hot': stats['hot'],
                'quantity': stats['quantity'],
            }
            for author, stats in spider.data_dic.items()
        ]
        print(json.dumps(data))


# 活动列表item处理
class ActivityListPipeline:
    """Pipeline for activity-list items.

    Collects each item into ``spider.data`` and dumps the accumulated
    list as JSON when the spider closes.
    """

    def process_item(self, item, spider):
        """Append a deep copy of *item* to ``spider.data``.

        Deep copy guards against duplicated/garbled data if the item is
        mutated elsewhere. Returns the original item so that later
        pipelines in ITEM_PIPELINES still receive it.
        """
        data_item = copy.deepcopy(item)  # deep copy: prevent duplicate/garbled data
        spider.data.append(dict(data_item))
        return item  # keep the item flowing through subsequent pipelines

    def close_spider(self, spider):
        """Serialize all collected items to JSON and print the result."""
        print(json.dumps(spider.data))
        