# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter

import pymongo


class ProxyPipeline:
	"""Scrapy item pipeline that stores proxy items in MongoDB.

	Writes to the ``proxy_kuai`` collection of the ``proxy`` database on
	localhost. Items whose (ip, port) pair already exists are skipped.

	Raises:
		Exception: from ``__init__`` when the expected database or
			collection does not exist (messages kept in Chinese, as emitted
			by the original code).
	"""

	def __init__(self):
		self.myclient = pymongo.MongoClient("mongodb://localhost:27017/")
		# list_database_names() replaces database_names(), which was
		# deprecated in PyMongo 3.7 and removed in PyMongo 4; it also
		# matches the list_collection_names() call below.
		if "proxy" not in self.myclient.list_database_names():
			raise Exception("不存在[proxy]数据库")
		self.mydb = self.myclient["proxy"]
		if "proxy_kuai" not in self.mydb.list_collection_names():
			raise Exception("不存在[proxy_kuai]集合")
		self.mycol = self.mydb["proxy_kuai"]

	# Called once when the spider starts.
	def open_spider(self, spider):
		print('爬虫开始')

	def process_item(self, item, spider):
		"""Insert the item unless an identical ip/port document exists.

		Always returns the item, as the Scrapy pipeline contract requires,
		so downstream pipelines still receive it.
		"""
		query = {'ip': item['ip'], 'port': item['port']}
		# count_documents(..., limit=1) replaces Cursor.count(), which was
		# removed in PyMongo 4; limit=1 stops scanning at the first match.
		if self.mycol.count_documents(query, limit=1) == 0:
			# Insert a shallow copy: insert_one() adds an ``_id`` key to
			# the mapping it receives, which would mutate the item itself.
			self.mycol.insert_one(dict(item))
		else:
			print('重复ip')
		return item

	# Called once when the spider finishes.
	def close_spider(self, spider):
		# Release the MongoDB connection.
		self.myclient.close()
		print('爬虫结束')


