# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html

# from scrapy.pipelines.files import FilesPipeline
import datetime
import os
import json

class DazhongdianpingPipeline(object):
    """Scrapy item pipeline that writes each scraped item as one JSON line.

    Output goes to ``./result/dzdp_<timestamp>.json`` (JSON Lines format,
    UTF-8, non-ASCII characters kept readable via ``ensure_ascii=False``).
    """

    # Single source of truth for the output directory (was hard-coded twice).
    RESULT_DIR = './result/'

    def open_spider(self, spider):
        """Create the output directory (if needed) and open the output file."""
        print('spider opening')
        # exist_ok avoids the check-then-create race of exists()+makedirs().
        os.makedirs(self.RESULT_DIR, exist_ok=True)

        # Timestamped filename so repeated runs never overwrite each other.
        now = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
        self.dzdp_fp = open(
            os.path.join(self.RESULT_DIR, f'dzdp_{now}.json'),
            'w', encoding='utf-8',
        )

    def process_item(self, item, spider):
        """Serialize *item* to one JSON line and pass it through unchanged."""
        dzdp_json = json.dumps(dict(item), ensure_ascii=False)
        self.dzdp_fp.write(dzdp_json + '\n')
        return item

    def close_spider(self, spider):
        """Close the output file; tolerate open_spider having failed early."""
        # getattr guard: if open_spider raised before creating dzdp_fp,
        # close_spider must not raise a secondary AttributeError.
        fp = getattr(self, 'dzdp_fp', None)
        if fp is not None:
            fp.close()
        print('spider finished')


# class SelfDefineFilePipline(FilesPipeline):
#     """
#     继承FilesPipeline，更改其存储文件的方式
#     """
#     def __init__(self, *args, **kwargs):
#         super().__init__(*args, **kwargs)
#
#     def file_path(self, request, response=None, info=None):
#         file_path =request.url.split('/')[-1]
#         return file_path