# -*- coding: utf-8 -*-
import scrapy
from get_proxy.items import GetProxyItem


class ProxySpiderSpider(scrapy.Spider):
    """Scrape proxy-server entries from the table served by localhost:8888."""

    name = "proxy_spider"
    allowed_domains = ["localhost:8888"]
    start_urls = ['http://localhost:8888/']

    def parse(self, response):
        """Parse the proxy listing page into ``GetProxyItem`` objects.

        Returns a list of items, one per complete table row. Rows missing
        any expected cell (e.g. the table's header row, whose cells are
        ``<th>`` rather than ``<td>``) are skipped instead of raising
        IndexError, which the previous ``extract()[0]`` indexing did.
        """
        rows = response.xpath('//table[@class="table table-striped"]//tr')
        items = []

        for row in rows:
            # extract_first() yields None (instead of raising IndexError)
            # when a cell is absent, so incomplete rows can be detected.
            fields = {
                'ip': row.xpath('./td[2]/text()').extract_first(),
                'port': row.xpath('./td[3]/text()').extract_first(),
                'location': row.xpath('./td[4]/a/text()').extract_first(),
                'protocol': row.xpath('./td[5]/a/text()').extract_first(),
                'last_detect_time': row.xpath('./td[7]/text()').extract_first(),
            }
            if any(value is None for value in fields.values()):
                # Header row or malformed row: skip rather than crash.
                continue

            item = GetProxyItem()
            for key, value in fields.items():
                item[key] = value

            # Spider-scoped logger replaces the Py2 print debugging;
            # lazy %s formatting defers the str() cost unless DEBUG is on.
            self.logger.debug('adding proxy item: %s', item)
            items.append(item)

        return items
