# -*- coding: utf-8 -*-
import scrapy
import time
import json
from scrapyProject.spiders.proxy.items import Proxyitem
from scrapyProject.spiders.proxy.proxyManage import proxyManageForMySQL
import re
class proxySpider(scrapy.Spider):
    """Harvest free HTTP/HTTPS proxies and verify that they hide the client IP.

    Candidate proxies are scraped from xicidaili (a kuaidaili parser is kept
    below for the same purpose), and proxies already stored through
    ``proxyManageForMySQL`` are re-validated.  Every candidate is probed via
    ``<scheme>://httpbin.org/ip``; a proxy is kept only when httpbin reports
    the proxy's own address as the request origin, i.e. the proxy is
    high-anonymity and the real client IP is hidden.
    """

    name = "self_proxy"
    allowed_domains = ["www.kuaidaili.com", "www.xicidaili.com"]

    # Dotted-quad IPv4 matcher (each octet 0-255).  Compiled once at class
    # level instead of on every handle_proxy() call.
    _IPV4_RE = re.compile(
        r"\b(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
        r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\b"
    )

    def start_requests(self):
        """Schedule listing-page crawls plus re-checks of stored proxies."""
        manage = proxyManageForMySQL()

        # Crawl the first 3 pages of xicidaili's high-anonymity list.
        for page in range(1, 4):
            yield scrapy.Request(
                'http://www.xicidaili.com/nn/' + str(page),
                callback=self.parse_xicidaili,
            )

        # Re-validate every proxy already persisted in the database.
        for row in manage.GetAllData():
            urlData = self.handle_proxy(proxy=row["proxy"], scheme=row["scheme"])
            yield scrapy.Request(
                urlData[0],
                callback=self.check_available,
                meta=urlData[1],
                dont_filter=True,  # same probe URL is used for many proxies
            )

        # NOTE(review): these run while start_requests is being consumed,
        # i.e. BEFORE the requests scheduled above have completed -- they
        # operate on the previous run's results.  Confirm this is intended.
        manage.proxyDeleteExpired(500)
        manage.proxyUpdateInfo()

    def _build_check_meta(self, proxy, scheme, ip):
        """Build the shared request meta for an availability probe.

        Previously duplicated verbatim in handle_proxy()/handle_address().
        The ``_proxy_scheme``/``_proxy_ip`` keys are consumed by
        check_available(); ``proxy`` is read by Scrapy's proxy middleware.
        """
        return {
            'proxy': proxy,
            'dont_retry': True,
            'download_timeout': 10,
            '_proxy_scheme': scheme,
            '_proxy_ip': ip,
            'lastCheckTime': time.time(),  # probe start, used to compute speed
        }

    def handle_proxy(self, proxy, scheme):
        """Build ``[probe_url, meta]`` for a stored proxy URL.

        proxy  -- full proxy URL, e.g. ``'http://1.2.3.4:8080'``
        scheme -- ``'http'`` or ``'https'``
        """
        url = '%s://httpbin.org/ip' % scheme
        # Extract the bare IPv4 address from the proxy URL.  A non-match now
        # yields None (the proxy just fails the anonymity comparison) instead
        # of raising IndexError and aborting the whole request batch.
        match = self._IPV4_RE.search(proxy)
        ip = match.group(0) if match else None
        return [url, self._build_check_meta(proxy=proxy, scheme=scheme, ip=ip)]

    def handle_address(self, ip, port, scheme):
        """Build ``[probe_url, meta]`` from a scraped ip/port/scheme triple."""
        url = '%s://httpbin.org/ip' % scheme
        proxy = '%s://%s:%s' % (scheme, ip, port)
        return [url, self._build_check_meta(proxy=proxy, scheme=scheme, ip=ip)]

    def parse_kuaidaili(self, response):
        """Parse one kuaidaili free-proxy listing page.

        BUG FIX: the per-row fields were read from the table-wide selector
        (``dataMain``) instead of the current row, so every row yielded the
        first row's port and scheme.  Unused columns (location, last-verified,
        speed) are no longer extracted.
        """
        for row in response.xpath('.//div[@id="content"]//tbody//tr'):
            ip = row.xpath('.//td[@data-title="IP"]/text()').extract_first()
            port = row.xpath('.//td[@data-title="PORT"]/text()').extract_first()
            scheme = row.xpath('.//td[@data-title="类型"]/text()').extract_first()
            if not (ip and port and scheme):
                continue  # malformed / header row -- skip instead of crashing
            # Site lists schemes uppercase ("HTTP"); normalise like the
            # xicidaili parser does so probe URLs are well-formed.
            urlData = self.handle_address(ip=ip, port=port, scheme=scheme.lower())
            yield scrapy.Request(
                urlData[0],
                callback=self.check_available,
                meta=urlData[1],
                dont_filter=True,
            )

    def check_available(self, response):
        """Yield a Proxyitem when the probed proxy hides the real client IP.

        httpbin echoes the request origin; if it equals the proxy's own IP,
        the proxy is anonymous (our address never reached the target).
        """
        proxy_ip = response.meta['_proxy_ip']
        if proxy_ip == json.loads(response.text)['origin']:
            itemData = Proxyitem()
            itemData["proxy"] = response.meta['proxy']
            itemData["proxy_scheme"] = response.meta['_proxy_scheme']
            itemData['lastCheckTime'] = response.meta['lastCheckTime']
            # Round-trip latency of the probe, in seconds.
            itemData['speed'] = time.time() - response.meta['lastCheckTime']
            yield itemData

    def check_connect(self, response):
        """Yield a Proxyitem for any proxy that merely connects.

        Unlike check_available(), no anonymity check is performed -- reaching
        this callback at all means the proxy relayed the request.
        """
        itemData = Proxyitem()
        itemData["proxy"] = response.meta['proxy']
        itemData["proxy_scheme"] = response.meta['_proxy_scheme']
        yield itemData

    def parse_xicidaili(self, response):
        """Parse one xicidaili listing page (first row is the table header)."""
        for sel in response.xpath('//table[@id="ip_list"]/tr[position()>1]'):
            ip = sel.css('td:nth-child(2)::text').extract_first()
            port = sel.css('td:nth-child(3)::text').extract_first()
            scheme = sel.css('td:nth-child(6)::text').extract_first()
            if not (ip and port and scheme):
                continue  # guard: extract_first() may return None
            urlData = self.handle_address(ip=ip, port=port, scheme=scheme.lower())
            yield scrapy.Request(
                urlData[0],
                callback=self.check_available,
                meta=urlData[1],
                dont_filter=True,
            )
