import requests

from core.proxy_spider.base_spider import BaseSpider
import re
import json
from domain import Proxy
import time
import random
from urllib.parse import quote
from urllib.parse import unquote
from urllib.parse import urlencode
from core.db.mongo_pool import MongoPool
from utils.http import get_request_header


class LunaSpider(BaseSpider):
    """Spider for freeproxy.lunaproxy.com (Hong Kong pages).

    Routes its HTTP requests through a proxy record stored in MongoDB when
    one is available; otherwise falls back to the base spider's plain fetch.
    """

    urls = ['http://freeproxy.lunaproxy.com/hk/page/{}.html'.format(i) for i in range(1, 2)]
    group_xpath = '/html/body/div[2]/div[2]/div[2]/div'
    detail_xpath = {'ip': './div[1]/text()', 'port': './div[2]/text()', 'area': './div[4]/text()'}

    def __init__(self):
        super().__init__()
        self.mongodb = MongoPool()
        # Proxy record used to route outgoing requests; the guard in
        # get_response_from_url implies select_one may return None, so do
        # not subscript it unconditionally (original crashed in that case).
        self.proxy = self.mongodb.select_one('45.43.32.228')
        if self.proxy:
            self.proxies = {
                'http': "http://{}:{}".format(self.proxy['ip'], self.proxy['port'])
            }
        else:
            self.proxies = None

    def get_response_from_url(self, url):
        """Return the raw page body (bytes), via self.proxies when configured.

        BUG FIX: the fallback branch previously called the base
        implementation without returning its result, so callers got None.
        """
        if self.proxy:
            response = requests.get(url, headers=get_request_header(), proxies=self.proxies)
            return response.content
        # No proxy record available — use the base spider's plain request.
        return super().get_response_from_url(url)






class Ip3366Spider(BaseSpider):
    """Spider configuration for the www.ip3366.net free-proxy listings."""

    # One listing type (stype=1) and a single page for now; widen the
    # ranges to crawl more of the site.
    urls = [
        'http://www.ip3366.net/free/?stype={}&page={}'.format(stype, page)
        for stype in range(1, 2, 2)
        for page in range(1, 2)
    ]
    group_xpath = '//*[@id="list"]/table/tbody/tr'
    detail_xpath = {'ip': './td[1]/text()', 'port': './td[2]/text()', 'area': './td[5]/text()'}


class KuaiSpider(BaseSpider):
    """Spider for www.kuaidaili.com free-proxy pages.

    The site embeds its table data as a JavaScript literal
    (``fpsList = [...];``) inside a <script> tag, so proxies are extracted
    with a regex + json.loads rather than row-by-row XPath.
    """

    urls = ['https://www.kuaidaili.com/free/inha/{}/'.format(i) for i in range(1, 3)]
    group_xpath = '/html/body/script[5]'
    detail_xpath = {'ip': './td[1]/text()', 'port': './td[2]/text()', 'area': './td[5]/text()'}

    # Compiled once at class level instead of on every call. Group 1 is the
    # JSON array assigned to ``fpsList``, up to the first ';' (same text the
    # original lookbehind/lookahead pattern matched).
    _DATA_REGEX = re.compile(r'fpsList = ([\s\S]*?);')

    def get_proxies_from_response(self, content):
        """Yield Proxy objects parsed from the embedded fpsList JSON array.

        :param content: raw page body as bytes (UTF-8 encoded HTML).
        """
        result = self._DATA_REGEX.search(content.decode())
        if result:
            for item in json.loads(result.group(1)):
                yield Proxy(item['ip'], item['port'], area=item['location'])

    def get_response_from_url(self, url):
        # Random delay to avoid hammering the site / tripping rate limits.
        time.sleep(random.uniform(1, 3))
        return super().get_response_from_url(url)


class ListPlusSpider(BaseSpider):
    """Spider configuration for list.proxylistplus.com fresh HTTP proxies."""

    # Single page for now; the first two table rows are headers, hence
    # the position()>2 filter in the group XPath.
    urls = [
        'https://list.proxylistplus.com/Fresh-HTTP-Proxy-List-{}'.format(page)
        for page in range(1, 2)
    ]
    group_xpath = '//*[@id="page"]/table[2]/tr[position()>2]'
    detail_xpath = {'ip': './td[2]/text()', 'port': './td[3]/text()', 'area': './td[5]/text()'}


class Cn66ipSpider(BaseSpider):
    """Spider for www.66ip.cn, which returns a plain GBK-encoded text page.

    The response is not an HTML table, so ``group_xpath``/``detail_xpath``
    are intentionally empty; proxies are pulled out with an ip:port regex
    and the region is appended to each URL via the ``sxb`` query parameter.
    """

    urls = ['http://www.66ip.cn/mo.php?tqsl=5&i={}'.format(i) for i in range(1, 3)]
    group_xpath = ''
    detail_xpath = {}
    # Regions to query (HK, US, TW, JP, SG).
    sxbs = ['香港', '美国', '台湾', '日本', '新加坡']

    # Compiled once at class level (was rebuilt on every call).
    # Matches a dotted-quad IPv4 address followed by ':port'.
    _IP_REGEX = re.compile(
        r'((25[0-5]\.|2[0-4]\d\.|1\d{2}\.|[1-9]?\d\.){3}(25[0-5]|2[0-4]\d|1\d{2}|[1-9]?\d)):([1-6]?\d{1,4})')

    def get_proxies_from_response(self, content):
        """Yield a Proxy for every ip:port pair found in the GBK page body."""
        for item in self._IP_REGEX.findall(content.decode('gbk')):
            # findall returns capture-group tuples: item[0] is the full IP,
            # item[3] is the port.
            yield Proxy(item[0], item[3])

    def get_proxies(self):
        """Fetch each region's page and tag every yielded proxy with it."""
        for area in self.sxbs:
            for url in self.urls:
                # The site expects the region name GB2312 percent-encoded.
                url_f = url + '&sxb=' + quote(area, encoding='gb2312')
                content = self.get_response_from_url(url_f)
                # Note: a generator is always truthy, so the original
                # ``if proxies:`` guard was dead code — iterate directly.
                for item in self.get_proxies_from_response(content):
                    item.area = area
                    yield item


if __name__ == '__main__':
    # Manual smoke test: instantiate one spider and print everything it
    # yields. Swap in Ip3366Spider(), KuaiSpider(), ListPlusSpider() or
    # Cn66ipSpider() to exercise a different site.
    spider = LunaSpider()
    for proxy in spider.get_proxies():
        print(proxy)
