import scrapy
from crawlIPs.items import KuaidailiItem


class KuaidailiSpider(scrapy.Spider):
    """Spider that scrapes the free proxy list from kuaidaili.com."""

    name = 'kuaidaili'
    allowed_domains = ['kuaidaili.com']
    start_urls = ['http://kuaidaili.com/free']

    def parse(self, response):
        """Extract proxies from the free-proxy table.

        Yields one ``KuaidailiItem`` per data row with:
          - ``type``: contents of the 4th ``<td>`` (proxy type column)
          - ``ip_port``: ``"<ip>:<port>"`` built from the IP and 2nd cells
          - ``user_pwd``: always empty (free proxies need no credentials)

        Rows missing an IP or port cell (e.g. the ``<th>`` header row,
        which has no ``<td>`` children) are skipped instead of producing
        a bogus ``"None:None"`` entry.
        """
        rows = response.xpath(
            '//table[@class="table table-bordered table-striped"]//tr')
        for row in rows:
            curip = row.xpath('./td[@data-title="IP"]/text()').get()
            port = row.xpath('./td[2]/text()').get()
            if curip is None or port is None:
                # Header/malformed row: no IP or port cell — skip it.
                continue
            # Create a fresh item per row. Reusing one mutable Item across
            # yields lets later rows clobber earlier ones in the pipeline.
            item = KuaidailiItem()
            item['type'] = row.xpath('./td[4]/text()').get()
            item['ip_port'] = f'{curip}:{port}'
            item['user_pwd'] = ''
            yield item
