# \file Name: HttpClient.py
# Created:  My name 10/30/2023
# Modified: My name 05/14/2024 14:21

# \brief   HTTP fetching helpers with proxy rotation

# \details Tracks per-proxy success/failure counts and round-trip times,
#          and prefers the fastest (or least-used) working proxy per fetch.

# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2, or (at your option) any later
# version.

# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.

# If you have not received a copy of the GNU General Public License, write
# to the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.

from enum import Enum, auto
import re
import time
import urllib.request
import sys
from urllib3.exceptions import ProxySchemeUnknown
import requests
from requests.exceptions import ConnectTimeout, ProxyError

class HttpProxy():
    """One proxy endpoint plus the health/latency statistics gathered for it.

    ``proxies`` is a mapping in the shape requests expects, e.g.
    ``{'http': 'http://ip:port', 'https': 'http://ip:port'}``; a mapping
    without an 'http' entry is treated as a direct connection (see
    ``proxy_ip``).
    """

    # Splits 'http(s)://<ip>:<port>' into named groups schema/ip/port.
    re_uri = re.compile(r'(?P<schema>(http|https))://(?P<ip>([^: \t])+):(?P<port>([^: \t])+)')

    ''' internal statistic class '''
    class Stats():
        def __init__(self):
            # Counter storage; absent keys read as 0 via get().
            self.data = {}
        def get(self, key):
            # Missing counters read as 0 so callers can compare/subtract freely.
            return self.data.get(key, 0)
        def set(self, key, value):
            self.data[key] = value
        def incr(self, key):
            # Increment, creating the counter at 1 on first use.
            if key in self.data:
                self.data[key] += 1
            else:
                self.data[key] = 1
        def decr(self, key):
            # Decrement, creating the counter at -1 on first use.
            if key in self.data:
                self.data[key] -= 1
            else:
                self.data[key] = -1

    def __init__(self, _proxies):
        # 'INIT' until update() has accumulated enough results to latch
        # the status to 'SUCC' or 'FAIL'.
        self.status = 'INIT'
        self.proxies = _proxies
        self.stats = self.__class__.Stats()
        # 255 acts as the "not measured yet" sentinel for both RTT fields.
        self.rtt = 255
        self.avg_rtt = 255
        self.time_tick = 0    # start() timestamp of the request in flight
        self._timeout = 1     # base timeout in seconds; see the timeout property
    @classmethod
    def get_local_proxies(cls):
        """Build an HttpProxy from the system proxy settings (env vars).

        Entries lacking a scheme are prefixed with one so re_uri matches.
        """
        local_px = urllib.request.getproxies()
        for pname, pvalue in local_px.items():
            if pname in ['http', 'https']:
                rst = cls.re_uri.match(pvalue)
                if not rst:
                    # e.g. '10.0.0.1:8080' -> 'http://10.0.0.1:8080'
                    local_px[pname] = f'{pname}://{pvalue}'
        return cls(local_px)
    @property
    def proxy(self):
        """The raw proxies mapping handed to requests."""
        return self.proxies
    @property
    def proxy_ip(self):
        """IP portion of the 'http' proxy URI, or '<direct>' when absent."""
        _http_p = self.proxies.get('http', '')
        rst = self.re_uri.match(_http_p)
        if rst:
            return rst.group('ip')
        return '<direct>'
    @property
    def proxy_port(self):
        """Port portion of the 'http' proxy URI, or '' when absent."""
        _http_p = self.proxies.get('http', '')
        rst = self.re_uri.match(_http_p)
        if rst:
            return rst.group('port')
        return ''
    @property
    def score(self):
        """Lifetime successes minus lifetime failures."""
        return self.stats.get('success') - \
            self.stats.get('failed')
    @property
    def timeout(self):
        """Request timeout in seconds: base + 1 per recent failure, capped at 3."""
        max_timeout = 3
        return min(self._timeout + self.stats.get('rct_failed'), max_timeout)
    def start(self):
        """Record the moment a request is issued; update('SUCC') measures from here."""
        self.time_tick = round(time.time(), 3)

    def update(self, state):
        """Fold one request outcome into the statistics.

        On 'SUCC' the RTT is measured and averaged (exact running mean for
        the first 50 samples, then a 49/50 weighted window); on any other
        state the failure counters grow.  Recent ('rct_*') counters move in
        opposite directions and are clamped, and after enough consecutive
        results ``status`` is latched to *state*.
        """
        if state == 'SUCC':
            self.rtt = round(time.time() - self.time_tick, 3)
            if self.avg_rtt == 255:
                # First measurement replaces the sentinel outright.
                self.avg_rtt = self.rtt
            else:
                _success = self.stats.get('success')
                if _success < 50:
                    # Exact running mean while the sample count is small.
                    self.avg_rtt = round(
                        (self.avg_rtt * _success + \
                         self.rtt) / (_success + 1),
                        3
                    )
                else:
                    # Afterwards weight history as 49 of 50 samples.
                    self.avg_rtt = round(
                        (self.avg_rtt * 49 + \
                         self.rtt) / 50,
                        3
                    )
            self.stats.incr('success')
            if self.stats.get('rct_failed') > 0:
                self.stats.decr('rct_failed')
            self.stats.incr('rct_success')
            ## try more time to confirm, and test speed
            if self.stats.get('rct_success') > 5:
                self.status = state
                # Clamp so a later failure streak can tip the balance quickly.
                self.stats.set('rct_success', 6)
        else:
            self.stats.incr('failed')
            if self.stats.get('rct_success') > 0:
                self.stats.decr('rct_success')
            self.stats.incr('rct_failed')
            ## when failed 3 times
            if self.stats.get('rct_failed') > 2:
                self.status = state
                # Clamp for the same reason as rct_success above.
                self.stats.set('rct_failed', 3)

class classproperty(property):
    """A property whose getter receives the owning class instead of an instance."""

    def __get__(self, obj, objtype=None):
        # Ignore the instance (if any) and hand the class to the wrapped getter.
        return self.fget(objtype)

class HttpManager():
    """Process-wide registry of HttpProxy instances shared by HttpClient objects."""

    _proxies = []

    @classmethod
    def config(cls, options):
        """Rebuild the shared proxy list from *options*.

        *options* is a list of proxies dicts (requests-style).  The system
        proxy is appended as well, unless an entry with the same 'http' URL
        is already configured.  A non-list or empty *options* leaves only
        the local proxy.

        BUG FIX: the original assigned to 'config__proxies' (double
        underscore) while later reading both that name and 'config_proxies':
        non-list options raised NameError, and the local-proxy dedup check
        always compared against an empty list.
        """
        config_proxies = options if options and isinstance(options, list) else []
        cls._proxies = [HttpProxy(_px) for _px in config_proxies]
        local_px = HttpProxy.get_local_proxies()
        if local_px.proxy.get('http', '') not in [p.get('http', '') for p in config_proxies]:
            cls._proxies.append(local_px)

    @classmethod
    def get_client(cls):
        """Return a new HttpClient bound to the shared proxy list."""
        return HttpClient(cls._proxies)

    @classproperty
    def proxies(cls):
        # Exposes the shared list; callers iterate it for reporting.
        return cls._proxies

class HttpClient():
    """HTTP fetcher that tries each configured HttpProxy in policy order."""

    class ProxyPolicy(Enum):
        FASTEST = auto()       # prefer lowest average RTT
        ROUND_ROBIN = auto()   # spread load over confirmed-working proxies

    def __init__(self, _proxies):
        self.sess = requests.Session()
        self.extra_hdrs = {}
        self.method = 'GET'
        self.proxies = _proxies      # list of HttpProxy, shared with HttpManager
        self.encoding = 'utf-8'
        self.policy = self.ProxyPolicy.FASTEST

    def add_header(self, _dict):
        """Merge *_dict* into the extra headers sent with every request."""
        if _dict and isinstance(_dict, dict):
            self.extra_hdrs.update(_dict)

    def set_encoding(self, _encoding):
        """Charset used to decode response bodies."""
        self.encoding = _encoding

    def get_proxy(self):
        """Return the first proxy entry, or {} when none are configured."""
        if self.proxies and isinstance(self.proxies, list):
            return self.proxies[0]
        return {}

    def sort_proxies(self):
        """Order self.proxies in place according to the active policy."""
        def sort_as_fastest():
            # Untested (INIT) proxies sort first (key 0) so they get probed.
            self.proxies.sort(
                key=lambda x: x.avg_rtt if x.status != 'INIT' else 0,
                reverse=False
            )

        def sort_as_round_robin():
            # BUG FIX: HttpProxy has no '.success' attribute; the original
            # 'x.success' raised AttributeError.  Use the stats counter so
            # the least-used working proxy sorts first.
            self.proxies.sort(
                key=lambda x: x.stats.get('success') if x.status == 'SUCC' else sys.maxsize,
                reverse=False
            )

        if self.policy == self.ProxyPolicy.FASTEST:
            sort_as_fastest()
        elif self.policy == self.ProxyPolicy.ROUND_ROBIN:
            # Fall back to FASTEST until every proxy has been probed once.
            if next(
                iter([p for p in self.proxies if p.status == 'INIT']), None
            ) is None:
                sort_as_round_robin()
            else:
                sort_as_fastest()

    def fetch(self, _url):
        """Request *_url* via each proxy in turn until one succeeds.

        Returns (status_code, decoded_body); (0, '') when every proxy failed.
        """
        ## Session.request(method, url, params=None, data=None, headers=None,
        ##    cookies=None, files=None, auth=None, timeout=None,
        ##    allow_redirects=True, proxies=None, hooks=None, stream=None,
        ##    verify=None, cert=None, json=None)
        resp = None

        # Make sure a direct-connection fallback (empty proxies mapping)
        # is present.  BUG FIX: the original compared {} against HttpProxy
        # objects and then did 'self.proxies += {}', a no-op that never
        # actually appended the fallback.
        if not any(px.proxy == {} for px in self.proxies):
            self.proxies.append(HttpProxy({}))
        self.sort_proxies()

        for px in self.proxies:
            try:
                req_args = {
                    "method": self.method,
                    "url": _url,
                    "headers": self.extra_hdrs,
                    "proxies": px.proxy,
                    "timeout": px.timeout,
                }
                px.start()
                resp = self.sess.request(**req_args)
                px.update('SUCC')
                break   # first success wins
            except ConnectTimeout as ct:
                print("ConnectTimeout")
                print(ct.request.__dict__)
                px.update('FAIL')
            except ProxySchemeUnknown as psu:
                print("ProxySchemeUnknown")
                print(psu.__dict__)
                px.update('FAIL')
            except ProxyError as pe:
                print(pe)
                px.update('FAIL')
            except Exception as ex:
                print("default Exception")
                print(type(ex))
                print(ex)
                px.update('FAIL')
        return (
            resp.status_code,
            resp.content.decode(self.encoding) if resp.content else ""
        ) if resp else (0, '')


##class EXCEPTION_REPLY_TIMEOUT(Exception):
##    pass
##
##def get_reply(url, extra_hdrs=None, proxies=None, encoding='gb2312'):
##    try:
##        if extra_hdrs is None:
##            extra_hdrs = {}
##        if proxies is None:
##            proxies = {}
##        extra_hdrs.update({'Pragma': 'no-cache'})
##        response = requests.get(
##            url=url,
##            headers=extra_hdrs,
##            proxies=proxies,
##        )
##        ##data=s_payload,
##        ##verify=f'{cert_path}/CA.cert'
##        ##cert=(),
##    except ProxySchemeUnknown as psu:
##        safe_exit(0, psu)
##    except HTTPError as httperr:
##        safe_exit(0, httperr)
##    except URLError as urlerr:
##        ## timeout will also be reported here
##        if isinstance(urlerr.reason, socket.timeout):
##            raise EXCEPTION_REPLY_TIMEOUT from urlerr
##
##        print('urlerr REASON: %s' % urlerr.reason)
##        #safe_exit(0, urlerr.reason)
##        raise Exception(urlerr.reason) from urlerr
##    try:
##        html = response.content.decode(encoding)
##        #status_code = response.status_code
##    except HTTPError as httperr:
##        safe_exit(0, httperr)
##    except http.client.HTTPException as httpexp:
##        #safe_exit(0, 'HTTPException')
##        safe_exit(0, httpexp)
##    return html
# Spare proxy definition kept around for ad-hoc testing; duplicates the
# last entry of the __main__ config below.
aa = {
    "proxy_socket": "87.254.212.120:8080",
    "http": "http://87.254.212.120:8080",
    "https": "http://87.254.212.120:8080",
}

if __name__ == '__main__':
    # Ad-hoc smoke test: hammer one URL through the configured proxies and
    # dump per-proxy statistics after every fetch.
    HttpManager.config([
        {
            'http': 'http://10.144.1.10:8080',
            'https': 'http://10.144.1.10:8080',
        },
        {
            'http': 'http://135.245.192.7:8000',
            'https': 'http://135.245.192.7:8000'
        },
        {
            'http': 'http://135.251.33.16:8080',
            'https': 'http://135.251.33.16:8080'
        },
        {
            "proxy_socket": "87.254.212.120:8080",
            "http": "http://87.254.212.120:8080",
            "https": "http://87.254.212.120:8080"
        }
    ])
    client = HttpManager.get_client()
    client.set_encoding('utf-8')

    for i in range(0, 100):
        code, content = client.fetch('http://www.sohu.com/')
        # Table header, then one two-line row per proxy.
        print('''proxy ip        port status
   Succ  Fail  rct_S rct_F rtt.M50   rtt tmout''')
        message = '\n'.join([
            '{:15} {:5} {}\n  {:>5} {:>5}  {:>5} {:>5}   {:>5} {:>5} {:>2}'.format(
                p.proxy_ip, p.proxy_port, p.status,
                p.stats.get('success'), p.stats.get('failed'),
                p.stats.get('rct_success'), p.stats.get('rct_failed'),
                p.avg_rtt, p.rtt, p.timeout
            ) for p in list(HttpManager.proxies)
        ])
        print(f'--------------------------\n{message}\n')

    # BUG FIX: the original had an 'import pprint' dump loop *after*
    # sys.exit(0); that code was unreachable and has been removed.
    sys.exit(0)
