#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright © 2016 lizongzhe 
#
# Distributed under terms of the MIT license.

import re
import logging
import requests
import base64
from bs4 import BeautifulSoup
from proxy_manager.core import ProxyHost
from proxy_manager.resource.base import BaseProxyResource

logger = logging.getLogger(__name__)


# (code, display name) pairs as shown on the target site's country column.
# The first entry, 'any', means "no country filter".
COUNTRIES = (
    ('any', ''),
    ('CN', 'China'),
    ('US', 'United States'),
    ('RU', 'Russian Federation'),
    ('HK', 'Hong Kong'),
    ('CA', 'Canada'),
    ('NL', 'Netherlands'),
    ('BR', 'Brazil'),
    ('JP', 'Japan'),
    ('KR', 'Korea, Republic of'),
    ('CH', 'Switzerland'),
    ('VE', 'Venezuela'),
    ('AT', 'Austria'),
    ('ID', 'Indonesia'),
    ('IN', 'India'),
    ('EG', 'Egypt'),
    ('SK', 'Slovakia'),
    ('TR', 'Turkey'),
    ('KZ', 'Kazakhstan'),
    ('VN', 'Viet Nam'),
    ('AU', 'Australia'),
    ('IL', 'Israel'),
    ('IT', 'Italy'),
    ('RO', 'Romania'),
    ('TH', 'Thailand'),
    ('CO', 'Colombia'),
    ('FR', 'France'),
    ('BG', 'Bulgaria'),
    ('UA', 'Ukraine'),
    ('CL', 'Chile'),
    ('AE', 'United Arab Emirates'),
    ('EU', 'Europe'),
)

# Supported proxy schemes; each maps to a different saved search on the site.
SCHEMES = (
    ('https', 'https'),
    ('http', 'http'),
)

# Minimum-availability filter choices: (threshold, label) for 0%..90%.
AVAILABILITY = [(pct, f">={pct}%") for pct in range(0, 100, 10)]

class ProxyResource(BaseProxyResource):
    """Proxy-list scraper for proxylist.hidemyass.com.

    The site obfuscates IP addresses by interleaving the real digits with
    decoy ``<span>``/``<div>`` elements hidden via per-row inline CSS;
    :meth:`parse` strips the decoys to recover the real address.
    """

    url = 'http://proxylist.hidemyass.com/'

    def __init__(self, country='any', scheme='http', pages=2):
        """
        :param country: two-letter code from COUNTRIES ('any' = no filter)
        :param scheme: 'http' or 'https'; selects which saved search to fetch
        :param pages: page count (kept for API compatibility; not yet used)
        """
        self.country = country
        self.scheme = scheme
        self.pages = pages

    def download(self):
        """Fetch the proxy-list page for the configured scheme.

        :return: the ``requests.Response`` for the list page
        :raises requests.RequestException: on connection failure or timeout
        """
        # Each scheme corresponds to a pre-saved search on the site.
        if self.scheme == 'https':
            url = self.url + 'search-1307196#listable'
        else:
            url = self.url + 'search-1304592#listable'
        # Explicit timeout so a dead host cannot hang the caller forever.
        return requests.get(url, timeout=30)

    def parse(self, resp):
        """Extract ProxyHost entries from a downloaded list page.

        Rows that fail to parse are logged and skipped so one malformed row
        cannot abort the whole page.

        :param resp: response object returned by :meth:`download`
        :return: list of ProxyHost instances
        :raises IndexError: if ``self.country`` is not a known COUNTRIES code
                            (pre-existing contract of this method)
        """
        country_name = [name for code, name in COUNTRIES
                        if code == self.country][0]
        result = []
        soup = BeautifulSoup(resp.text, 'html.parser')
        for proxy_elem in soup.select('tbody tr'):
            try:
                cells = proxy_elem.select('td')
                nation = cells[3].text.replace('\n', '').strip(' ')
                if self.country != 'any' and nation != country_name:
                    continue

                # Collect CSS class names of decoy elements: every rule in
                # the row's <style> block that is NOT "display:inline" marks
                # digits that must be discarded.
                trash = []
                styles = cells[1].style.text.strip('\n')
                for style in styles.split('\n'):
                    if 'inline' not in style:
                        trash += re.findall(r'\.(.*)\{', style)

                # Drop the <style> block and every hidden/decoy element,
                # leaving only the genuine IP digits in the markup.
                row = str(cells[1]).replace('\n', '')
                row = re.sub(r'<style>(.*)</style>', '', row)
                row = re.sub(r'<div style="display:none">[0-9]*</div>', '', row)
                row = re.sub(r'<span style="display:none">[0-9]*</span>', '', row)
                for cls in trash:
                    # re.escape: class names come from scraped HTML and must
                    # not be interpreted as regex metacharacters.
                    safe = re.escape(cls)
                    row = re.sub(r'<span class="{}">[0-9]*</span>'.format(safe), '', row)
                    row = re.sub(r'<div class="{}">[0-9]*</div>'.format(safe), '', row)

                ip = re.sub(r'<.*?>', '', row).strip(' ')
                port = cells[2].text.replace(' ', '').strip('\n')
                result.append(ProxyHost(ip=ip, port=port, scheme=self.scheme))
            except Exception as e:
                logger.error(e, exc_info=True)
        return result

    def get_proxies(self):
        """Download and parse one proxy-list page.

        :return: list of ProxyHost instances
        :raises Exception: re-raises any download/parse failure after logging
        """
        try:
            resp = self.download()
            return self.parse(resp)
        except Exception as e:
            logger.error(e, exc_info=True)
            raise  # bare raise preserves the original traceback

