#!/usr/bin/env python
# -*- encoding: utf-8 -*-

import os
import requests
import time
from pymongo import MongoClient
from bs4 import BeautifulSoup
from bs4.element import Tag

from bspage import BSPage as bp

import logging
fmt = '%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s'
logging.basicConfig(level=logging.DEBUG, format=fmt)


class ProxyTool(object):
    """Scrape, convert and health-check proxies using proxydb.net services."""

    PROXYDB = 'http://proxydb.net/'
    GRABER_URL = 'http://proxydb.net/leecher'
    GEOLOOKUP_URL = 'http://proxydb.net/geo_lookup'
    ANONYMITY_URL = 'http://proxydb.net/anon'
    HEADERS = {'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:52.0) Gecko/20100101 Firefox/52.0'}

    @staticmethod
    def parse_proxydb(num=10):
        """Crawl proxydb.net listing pages and return the parsed proxy rows.

        :param num: maximum number of pages to fetch; 'all', -1 or None
                    means practically unlimited (10000 pages).
        :return: list of proxy dicts as produced by bp.parse_table.
        """
        ret = []
        num = 10000 if num in ('all', -1, None) else num
        start_page = ProxyTool._gethtml(ProxyTool.PROXYDB)
        logging.debug('[*] Get first page of proxy')
        if start_page is None:
            # _gethtml swallows network errors and returns None;
            # feeding None to BeautifulSoup would raise TypeError.
            return ret
        html = BeautifulSoup(start_page, 'html5lib')
        ret += bp.parse_table(html.table)
        _next = html.find('i', 'fa-arrow-right')
        logging.debug('[*] Next page %s' % str(_next))
        page_num = 1
        # Each listing page holds 50 entries; follow the "next" arrow icon
        # until it disappears or the page budget is exhausted.
        while _next and page_num < num:
            offset = page_num * 50
            logging.debug('[*] Get page %d' % (page_num + 1))
            url = ProxyTool.PROXYDB + '?offset=%s' % str(offset)
            page = ProxyTool._gethtml(url)
            if page is None:
                break
            html = BeautifulSoup(page, 'html5lib')
            ret += bp.parse_table(html.table)
            _next = html.find('i', 'fa-arrow-right')
            logging.debug('[*] Next page: %s' % str(_next))
            page_num += 1
        return ret

    @staticmethod
    def _gethtml(url):
        """Best-effort GET of *url*; return the body text or None on any error."""
        try:
            return requests.get(url).text
        except Exception as e:
            # Deliberate best-effort: callers treat None as "page unavailable".
            logging.debug('[-] Fetch failed for %s: %s' % (url, e))
            return None

    @staticmethod
    def grab(url):
        """Extract proxy addresses from an arbitrary page via the leecher service.

        :param url: page URL handed to proxydb's leecher form.
        :return: list of 'ip:port' strings found in the leecher output.
        """
        r = requests.post(ProxyTool.GRABER_URL,
                          data={'input': url},
                          headers=ProxyTool.HEADERS)
        html = BeautifulSoup(r.text, 'html5lib')
        # BUG FIX: Tag.__call__ is find_all() and returns a ResultSet, which
        # has no .string attribute; locate the single output textarea instead.
        proxy = html.find('textarea', id='output').string
        return proxy.split()

    @staticmethod
    def proxy2dict(ip, proxy_type='http', port=None, user=None, pwd=None):
        """Build a requests-style ``proxies`` mapping for one proxy endpoint.

        :param ip: 'host' or 'host:port'; port 1080 (or *port*) is appended
                   when missing.
        :param proxy_type: 'http', 'https' or 'socks5'; https/socks5 proxies
                           are registered for both the http and https schemes.
        :param port: default port used when *ip* carries none.
        :param user: optional basic-auth user embedded into the URL.
        :param pwd: optional basic-auth password embedded into the URL.
        :return: e.g. {'http': 'http://1.2.3.4:8080'}.
        """
        keys = ('http', 'https') if proxy_type in ('https', 'socks5') else ('http',)
        if ':' not in ip:
            ip += ':' + (port or '1080')
        cred = user + ':' + pwd + '@' if user and pwd else ''
        url = proxy_type + '://' + cred + ip
        # BUG FIX: the original iterated `for k, v in ret.iteritems()` and
        # rebound the loop variable v, so the dict values were never written
        # (and iteritems is Python-2-only). Assign into the dict instead.
        return dict((k, url) for k in keys)

    @staticmethod
    def _test(url, proxies=None, timeout=30):
        """Fetch *url* once through *proxies* and report reachability/latency.

        :param url: full URL or bare host (treated as http).
        :param proxies: requests-style proxies mapping, or None for direct.
        :param timeout: per-request timeout in seconds.
        :return: {'target': host, '<scheme>': {'ok': 0/1, 'time': secs}} on a
                 completed request, or {'target': host, '<scheme>':
                 {'status': error text}} when the request raised.
        """
        if not url.startswith('http'):
            target = url
            url = 'http://' + url
            _type = 'http'
        else:
            _type, target = url.split(':', 1)
            target = target[2:]  # strip the '//' left after 'scheme:'

        ret = dict(target=target)
        try:
            start = time.time()
            r = requests.get(url,
                             headers=ProxyTool.HEADERS,
                             proxies=proxies,
                             timeout=timeout)
            tim = time.time() - start
            if r.status_code == 200:
                logging.debug('[+] Connect %s ok' % url)
                logging.debug('[+] Time used: %f secs' % tim)
                ret[_type] = {'ok': 1, 'time': tim}
            else:
                ret[_type] = {'ok': 0, 'time': tim}
        except Exception as e:
            logging.exception(e)
            # BUG FIX: Exception.message is Python-2-only; str(e) is portable.
            ret[_type] = dict(status=str(e))
        return ret

    @staticmethod
    def test(proxy, target='www.google.com', timeout=30):
        """Test *proxy* against *target* over http (and https for bare hosts).

        :param proxy: 'ip:port' string or a ready-made proxies mapping.
        :param target: URL or bare host; bare hosts are tried on both schemes.
        :param timeout: per-request timeout in seconds.
        :return: merged result dict(s) from _test.
        """
        if isinstance(proxy, dict):
            proxies = proxy
        else:
            proxies = ProxyTool.proxy2dict(proxy)
        ret = {}
        logging.debug('[*] Test %s with proxy %s' % (target, proxy))
        # BUG FIX: the timeout parameter was ignored (hard-coded 30).
        if target.startswith('http'):
            ret = ProxyTool._test(target, proxies, timeout)
        else:
            ret.update(ProxyTool._test('http://' + target, proxies, timeout))
            ret.update(ProxyTool._test('https://' + target, proxies, timeout))
        return ret

    def chkmyip(self):
        # TODO: not implemented — intended to report the caller's visible IP.
        pass

    @staticmethod
    def chkgeo(proxy, save2file=None):
        """Geo-locate proxies via proxydb's lookup form; return the CSV text.

        :param proxy: one 'ip[:port]' string or a list of them.
        :param save2file: optional path; the CSV is appended to it when given.
        :return: CSV string scraped from the response textarea.
        """
        if isinstance(proxy, list):
            inps = ' '.join(proxy)
        else:
            # Single bare host: fall back to the conventional 1080 port.
            inps = proxy if ':' in proxy else proxy + ':' + '1080'
        r = requests.post(ProxyTool.GEOLOOKUP_URL,
                          data=dict(input=inps),
                          headers=ProxyTool.HEADERS)

        html = BeautifulSoup(r.text, 'html5lib')
        csv = html.find('textarea', 'form-control mb-2').string
        if save2file:
            with open(save2file, 'a') as f:
                f.write(csv)
        return csv

    @staticmethod
    def chkanony(proxy):
        """Report the anonymity level proxydb sees when routed through *proxy*.

        :param proxy: 'ip:port' string converted via proxy2dict.
        :return: anonymity level text scraped from the page.
        """
        proxies = ProxyTool.proxy2dict(proxy)
        r = requests.get(ProxyTool.ANONYMITY_URL,
                         headers=ProxyTool.HEADERS,
                         proxies=proxies,
                         )
        html = BeautifulSoup(r.text, 'html5lib')
        level = html.find('span', 'text-success').string
        return level

    @staticmethod
    def load(proxyfile):
        """Load proxies from a text file, one per line.

        :param proxyfile: path to the file.
        :return: list of stripped, non-empty lines.
        """
        with open(proxyfile, 'r') as f:
            # BUG FIX: the original tested the raw line (always truthy due to
            # its trailing newline), so blank lines produced '' entries.
            return [line.strip() for line in f if line.strip()]


class ProxyBox(object):
    """MongoDB-backed store of proxies, populated from proxydb and graded
    by measured latency."""

    def __init__(self, host='localhost', port=27017, db='box'):
        """Open the proxy collection; bootstrap it from proxydb when empty.

        :param host: MongoDB host.
        :param port: MongoDB port.
        :param db: database name holding the 'proxy' collection.
        """
        self.db = MongoClient(host, port)[db]
        self.proxies = self.db.proxy.find({'Type': 'HTTPS'})
        # BUG FIX: pymongo Cursors do not support truth-value testing, so
        # `if not self.proxies` never detected an empty collection; probe
        # with find_one() instead.
        if self.db.proxy.find_one({'Type': 'HTTPS'}) is None:
            self.getfromproxydb()
            self.chkavialable()

    def update(self):
        """Crawl proxydb and upsert every row into the collection."""
        proxies = ProxyTool.parse_proxydb()
        for p in proxies:
            query = dict(Proxy=p['Proxy'])
            # BUG FIX: pymongo Database has no exist() method; probe the
            # collection with find_one() instead.
            if self.db.proxy.find_one(query):
                logging.debug('[*] Update proxy %s' % p)
                self.db.proxy.update(query, p)
            else:
                logging.debug('[*] Insert new proxy: %s' % p)
                self.db.proxy.insert(p)

    def get(self, _type='excellent'):
        """Return a cursor over proxies of the given quality grade."""
        return self.db.proxy.find(dict(_type=_type))

    def all(self):
        """Return a cursor over every stored proxy."""
        return self.db.proxy.find()

    def getfromproxydb(self):
        """Bulk-insert a fresh proxydb crawl; return the inserted ids."""
        proxies = ProxyTool.parse_proxydb()
        ret = self.db.proxy.insert(proxies)
        return ret

    def chkavialable(self, target='https://www.google.com.hk',
                     time_limit=10):
        """Grade every cached HTTPS proxy by latency against *target*.

        Grades: 'excellent' (< time_limit secs), 'good' (< 2x), 'notbad'
        (< 5x), otherwise 'bad'. The measured status and check time are
        written back to each document.

        :param target: URL used for the probe; its scheme selects which
                       result entry of ProxyTool.test is graded.
        :param time_limit: base latency threshold in seconds.
        """
        _type, _ = target.split(':')
        for num, p in enumerate(self.proxies, 1):
            status = ProxyTool.test(p['Proxy'], target)
            p['status'] = status
            p['chktime'] = time.time()
            status = status[_type]
            logging.debug('[*] Check proxy %d' % num)
            # BUG FIX: when _test hits an exception its result has neither
            # 'ok' nor 'time' (only 'status'), so direct indexing raised
            # KeyError; also `is 1` compared identity, not value.
            ok = status.get('ok', 0)
            tim = status.get('time', float('inf'))
            if ok == 1 and tim < time_limit:
                p['_type'] = 'excellent'
            elif ok == 1 and tim < time_limit * 2:
                p['_type'] = 'good'
            elif ok == 1 and tim < time_limit * 5:
                p['_type'] = 'notbad'
            else:
                p['_type'] = 'bad'
            self.db.proxy.update(dict(_id=p['_id']), p)

if __name__ == '__main__':
    # Opening the box bootstraps it from proxydb when empty, then we
    # (re-)grade whatever it currently holds.
    box = ProxyBox()
    box.chkavialable()
