# -*- coding: utf-8 -*-
import scrapy
import json
from scrapys.items import ScrapysItem

class KuaidailiSpider(scrapy.Spider):
    """Scrape free proxy listings (ip, port, type) from kuaidaili.com.

    Crawls pages ``star_page`` .. ``end_page`` (inclusive) of the "inha"
    free-proxy list and yields one :class:`ScrapysItem` per table row.
    """

    name = 'kuaidaili'
    allowed_domains = ['kuaidaili.com']

    def __init__(self, star_page=1, end_page=3, *args, **kwargs):
        """Accept the page range as spider arguments.

        Args:
            star_page: first page to fetch (``-a star_page=N``);
                parameter name kept as-is for backward compatibility.
            end_page: last page to fetch (``-a end_page=N``).
        """
        super(KuaidailiSpider, self).__init__(*args, **kwargs)
        self.star_page = int(star_page)
        self.end_page = int(end_page)
        self.start_urls = [
            'https://www.kuaidaili.com/free/inha/{0}/'.format(self.star_page)
        ]

    def parse(self, response):
        """Yield one item per proxy-table row, then schedule follow-up pages.

        Yields:
            ScrapysItem: with 'ip', 'port' and 'type' fields per row.
            scrapy.Request: for each remaining page in the configured range
                (Scrapy's duplicate filter drops re-scheduled URLs).
        """
        rows = response.xpath(
            '//table[@class="table table-bordered table-striped"]/tbody/tr')
        for row in rows:
            item = ScrapysItem()
            # Relative XPath ('./td[...]') scopes the query to this row.
            # The original absolute '//td[...]' matched every cell on the
            # page, so each item got data from all rows at once.
            item['ip'] = row.xpath('./td[1]/text()').extract_first()
            item['port'] = row.xpath('./td[2]/text()').extract_first()
            item['type'] = row.xpath('./td[4]/text()').extract_first()
            # Yield inside the loop: the original yielded only once after
            # the loop, emitting a single (last) item per page.
            yield item

        # Schedule the remaining pages star_page+1 .. end_page. Any error
        # here propagates to Scrapy's error handling; the original called
        # exit(), killing the whole process, with an unreachable `raise`.
        for page in range(self.star_page + 1, self.end_page + 1):
            url = 'https://www.kuaidaili.com/free/inha/{}/'.format(page)
            yield scrapy.Request(url=url, callback=self.parse)