# -*- coding: utf-8 -*-
import scrapy
from ipproxy.items import IpproxyItem


class Ip181Spider(scrapy.Spider):
    """Spider that scrapes free proxy listings from www.ip181.com.

    Each table row on the index page is turned into an ``IpproxyItem``
    carrying ip, port, anonymity level, protocol, speed and country.
    """

    name = 'ip181'
    allowed_domains = ['www.ip181.com']
    start_urls = ['http://www.ip181.com/']

    def parse(self, response):
        """Parse the proxy table and yield one IpproxyItem per data row.

        :param response: scrapy Response for a start URL.
        :yields: IpproxyItem populated from the row's <td> text cells.
        """
        all_trs = response.xpath('//tbody/tr')
        # First row is the table header — skip it.
        for tr in all_trs[1:]:
            ipinfo = tr.xpath('td/text()').extract()
            # Malformed rows (fewer text cells than expected) would raise
            # IndexError below; skip them instead of aborting the crawl.
            if len(ipinfo) < 6:
                self.logger.warning('Skipping malformed row: %r', ipinfo)
                continue
            row = IpproxyItem()
            row['ip'] = ipinfo[0]
            row['port'] = ipinfo[1]
            row['anonymous'] = ipinfo[2]
            row['proxy_type'] = ipinfo[3]
            try:
                # Speed cell looks like "0.123 秒" (seconds); keep the number.
                row['speed'] = float(ipinfo[4].split(" 秒")[0])
            except ValueError:
                self.logger.warning('Unparseable speed value: %r', ipinfo[4])
                continue
            row['country'] = ipinfo[5]
            row['checked_time'] = None  # ipinfo[6]
            row['proxy_name'] = self.name
            yield row
