# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import logging
import socket
import telnetlib

from pymongo import MongoClient
from scrapy.utils.project import get_project_settings

# Raise the *root* logger to INFO so the pipeline's logging.info calls emit.
# NOTE(review): configuring the root logger at import time affects the whole
# process; a module-level `logging.getLogger(__name__)` is the usual pattern —
# confirm nothing else depends on this global setting.
logging.getLogger().setLevel(logging.INFO)

# Scrapy project settings; the MONGODB_* keys are read when the spider opens.
settings = get_project_settings()


class IpproxyPipeline(object):
    """Validate scraped proxies and persist the working ones to MongoDB.

    open_spider connects/authenticates and prunes dead records,
    process_item inserts each new reachable proxy, and close_spider
    logs completion and releases the client.
    """

    @staticmethod
    def is_valid_ip(ip, port):
        """Return True iff a TCP connection to ip:port succeeds within 2s.

        Uses socket.create_connection directly — this is exactly what
        telnetlib.Telnet did internally, and telnetlib is deprecated
        (removed in Python 3.13). The probe socket is closed explicitly;
        the original version leaked it.
        """
        try:
            with socket.create_connection((ip, port), timeout=2):
                return True
        except OSError:
            # Refused / unreachable / timed out -> proxy unusable. Narrowed
            # from a bare `except:` so unrelated bugs still surface.
            return False

    def open_spider(self, spider):
        """Connect to MongoDB, verify credentials, and purge dead proxies."""
        try:
            self.client = MongoClient(
                settings['MONGODB_HOST'], settings['MONGODB_PORT'])
            logging.info('Mongodb连接成功！')
        except Exception as e:
            logging.error('Mongodb连接失败：{0}'.format(e))
            # Without a client the pipeline cannot run; the original fell
            # through here and crashed with a NameError on the next line.
            raise

        # NOTE(review): Database.authenticate was removed in pymongo 4 —
        # pass username/password to MongoClient when upgrading.
        db = self.client.admin.authenticate(
            settings['MONGODB_USER'], settings['MONGODB_PASSWD'])

        if db:
            logging.info('Mongodb验证成功！')
        else:
            logging.error('Mongodb验证失败！')

        # One collection per spider name, inside the `ipproxy` database.
        self.collection = self.client.ipproxy[spider.name]

        # Re-check every stored proxy and drop those no longer reachable.
        for record in self.collection.find():
            if not IpproxyPipeline.is_valid_ip(record['ip'], record['port']):
                self.collection.delete_one({'_id': record['_id']})
                logging.warning('{0}:{1}代理不可用，已删除！'.format(
                    record['ip'], record['port']))

    def process_item(self, item, spider):
        """Insert the proxy if it is reachable and not already stored."""
        # BUG FIX: the original passed {'port': item['port']} as find()'s
        # *projection* argument, so deduplication matched on ip alone.
        # Match on ip AND port with a single filter instead.
        duplicate = self.collection.find_one(
            {'ip': item['ip'], 'port': item['port']}) is not None
        if not duplicate and IpproxyPipeline.is_valid_ip(item['ip'], item['port']):
            try:
                self.collection.insert_one(item)
                logging.info('已写入{0}！'.format(spider.name))
            except Exception as e:
                logging.error('写入出错：{0}'.format(e))
        else:
            # logging.warn is deprecated; warning() is the supported name.
            logging.warning(
                '{0}:{1}代理不可用，或已存在！'.format(item['ip'], item['port']))

        return item

    def close_spider(self, spider):
        """Log completion and release the MongoDB connection."""
        logging.info('{0}爬取结束！'.format(spider.name))
        # Guard with getattr: close_spider may run even if open_spider failed.
        client = getattr(self, 'client', None)
        if client is not None:
            client.close()
