# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html

import json
class ProxyspiderPipeline(object):
    """Scrapy item pipeline that appends each scraped proxy item to a
    local text file as one JSON object per line (JSON Lines format)."""

    def process_item(self, item, spider):
        """Serialize *item* to JSON and append it to proxy_ip_text.txt.

        Args:
            item: the scraped item (any mapping convertible via dict()).
            spider: the spider that produced the item (unused here, but
                required by the Scrapy pipeline interface).

        Returns:
            The unmodified item, so later pipelines can keep processing it.
        """
        # Open in append mode with explicit UTF-8 so output is consistent
        # across platforms; ensure_ascii=False keeps non-ASCII text readable
        # instead of \uXXXX escapes.
        with open('proxy_ip_text.txt', 'a', encoding='utf-8') as f:
            f.write(json.dumps(dict(item), ensure_ascii=False) + '\n')
        return item



# NOTE(review): removed two bare list-literal expressions that were left here,
# apparently pasted debug/console output. They were no-op statements with no
# module-level side effects, so deleting them does not change behavior.