# # Define your item pipelines here
# #
# # Don't forget to add your pipeline to the ITEM_PIPELINES setting
# # See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# # useful for handling different item types with a single interface
# from itemadapter import ItemAdapter
# import json

# class NewscrapyPipeline:
#     def __init__(self):
#         self.file = open("./world.json","w",encoding="")
    
#     def __del__(self):
#         self.file.close()

#     def process_item(self, item, spider):
#         item = dict(item)
#         js_data = json.dumps(item,ensure_ascii=False)+",\n"
#         self.file.write(js_data)
#         #使用完管道需要把数据返回给引擎
#         return item

import json

class NewscrapyPipeline:
    """Scrapy item pipeline that appends every scraped item to ./world.json
    as one JSON object per line (JSON Lines), UTF-8 encoded.
    """

    def __init__(self):
        # Opened once per pipeline instance; released in close_spider().
        self.file = open("./world.json", "w", encoding="utf-8")

    def close_spider(self, spider):
        """Called by Scrapy when the spider finishes; release the file handle.

        Nulling ``self.file`` after closing makes this idempotent, so the
        ``__del__`` fallback cannot double-close (or write to) a handle that
        is already closed.
        """
        if self.file:
            self.file.close()
            self.file = None

    def __del__(self):
        # Safety net: close the file even if Scrapy never invoked
        # close_spider (e.g. the pipeline is used outside a crawl).
        self.close_spider(None)

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line and write it to the output file.

        Returns the item unchanged so the engine can pass it to any
        downstream pipelines.
        """
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item
