# -*- coding: utf-8 -*-
import json
import codecs
import requests

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html

class IppoolPipeline(object):
    """Scrapy item pipeline that serializes scraped proxy records to a
    JSON-lines file (one JSON object per proxy, one per line)."""

    # NOTE(review): hard-coded absolute Windows path — consider moving this
    # into settings.py.  Raw string prevents backslashes from being treated
    # as escape sequences (the original non-raw literal relied on \j, \F,
    # \i, \d being *invalid* escapes, which is a SyntaxWarning on modern
    # Python and breaks silently if the path ever contains \t or \n).
    OUTPUT_PATH = r"E:\jiaocheng\F\scrapy\ippool\data\ip_list.json"

    def __init__(self):
        # Plain text mode with an explicit encoding; the original combined
        # binary mode 'wb' with an encoding via codecs.open.
        self.file = open(self.OUTPUT_PATH, 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Write one JSON line per proxy entry found in *item*.

        The item carries parallel lists under the keys IP / PORT / POSITION /
        TYPE / SPEED / LAST_CHECK_TIME; entry i of each list describes one
        proxy.  Returns the item unchanged so later pipelines still see it.
        """
        fields = ("IP", "PORT", "POSITION", "TYPE", "SPEED", "LAST_CHECK_TIME")
        # zip the parallel lists instead of indexing with range(len(...));
        # each `values` tuple is one proxy record in field order.
        for values in zip(*(item[f] for f in fields)):
            record = dict(zip(fields, values))
            # ensure_ascii=False keeps non-ASCII position names readable.
            self.file.write(json.dumps(record, ensure_ascii=False) + '\n')
        return item

    def close_spider(self, spider):
        """Close the output file when the spider finishes."""
        self.file.close()