# -*- coding: utf-8 -*-
import scrapy
from lxml import etree
import json
from proxySpider.items import ProxyspiderItem

class ProxySpider(scrapy.Spider):
    """Crawl kuaidaili.com free-proxy listing pages and yield proxy IPs.

    Starts at page 1 of the ``/free/inha/`` listing and follows every link
    found on each page; ``allowed_domains`` keeps the crawl on-site.
    """

    name = 'proxy'
    allowed_domains = ['kuaidaili.com']
    start_urls = ['https://www.kuaidaili.com/free/inha/1/']

    def parse(self, response):
        """Extract one proxy IP per table row, then follow on-page links.

        Yields
        ------
        ProxyspiderItem
            One item per non-empty IP cell in the listing table.
        scrapy.Request
            One request per ``<a href>`` on the page, parsed by this
            same callback.
        """
        # Use Scrapy's built-in selector instead of re-parsing response.text
        # with lxml. Selecting all rows (instead of a hard-coded range(1, 16))
        # also avoids yielding empty values when the table has fewer rows.
        for ip in response.xpath('//*[@id="list"]/table/tbody/tr/td[1]/text()').getall():
            # A fresh item per row: reusing one mutable item across yields
            # lets later assignments clobber items already in the pipeline.
            item = ProxyspiderItem()
            item['proxyip'] = ip
            yield item

        # Follow every link on the page; off-site links are filtered by
        # allowed_domains.
        for href in response.xpath('//a/@href').getall():
            # The original meta value carried embedded double quotes
            # ('"http://...:9000/"'), which HttpProxyMiddleware cannot use;
            # a proxy URL must be a plain scheme://host:port string.
            yield scrapy.Request(
                response.urljoin(href),
                callback=self.parse,
                meta={'proxy': 'http://117.87.179.13:9000'},
            )
