# -*- coding: utf-8 -*-
import scrapy
from CrawlerProxy.items import CrawlerProxyItem
import telnetlib

class ProxySpider(scrapy.Spider):
    """Scrape free proxies from xicidaili.com and yield only the ones
    that accept a TCP connection (probed with telnetlib).

    Yields:
        CrawlerProxyItem with fields: ip, port, position, type, speed,
        last_check_time.
    """
    name = 'proxy'
    allowed_domains = ['www.xicidaili.com']
    # Pages 1-10 of the "nn" (high-anonymity) listing; identical to the
    # previous hand-written list.
    start_urls = ['http://www.xicidaili.com/nn/%d' % page
                  for page in range(1, 11)]

    def parse(self, response):
        """Extract one proxy item per table row; yield it only if the
        proxy answers a TCP connect within the timeout.

        Note: the table's header row produces an item with ip=None;
        telnet() fails on it and it is silently skipped (same behavior
        as before).
        """
        for row in response.xpath("//*/table[@id='ip_list']//tr"):
            item = CrawlerProxyItem()
            item['ip'] = row.xpath(".//td[2]/text()").extract_first()
            item['port'] = row.xpath(".//td[3]/text()").extract_first()
            item['position'] = row.xpath(".//td[4]/a/text()").extract_first()
            item['type'] = row.xpath(".//td[6]/text()").extract_first()
            # Speed lives in the bar div's title attribute; pull the
            # leading decimal number. Raw string fixes the invalid
            # escape-sequence warning of the original pattern.
            speed = row.xpath('.//td[7]/div[@class="bar"]/@title').re(
                r'\d{0,2}\.\d{0,}')
            item['speed'] = speed[0] if speed else 0
            item['last_check_time'] = row.xpath(".//td[10]/text()").extract_first()
            self.logger.debug('item %s', item)
            if self.telnet(item):
                yield item

    # Availability probe: can we open a TCP connection to the proxy?
    def telnet(self, item):
        """Return True if ip:port accepts a connection within 3 seconds.

        Any exception (bad address, refused, timeout, ip=None from the
        header row) is treated as "proxy unusable".
        """
        # NOTE(review): telnetlib is deprecated and removed in Python
        # 3.13 — consider socket.create_connection() when upgrading.
        try:
            conn = telnetlib.Telnet(item['ip'], port=item['port'], timeout=3.0)
        except Exception as e:
            self.logger.debug('proxy check failed: %s', e)
            return False
        else:
            # Fix: close the probe connection — the original leaked one
            # socket per reachable proxy.
            conn.close()
            self.logger.debug('Connect Success!')
            return True
