# -*- coding:utf-8 -*-
## author : cypro666
## note   : python3.4+
"""
The most commonly used utils!!!
"""
import os, sys
import json, re
from base64 import b64encode, b64decode  
from urllib.parse import urlparse, unquote_plus

from tornado.netutil import Resolver
from requests import get as httpget, head as httphead, post as httppost

from .fileio import FileIO
from .debug import print_exception, time_meter

# short aliases for writing raw text straight to stdout / stderr
sysout = sys.stdout.write
syserr = sys.stderr.write


# Dotted-quad IPv4 address: four octets, each 0-255, no extra leading
# zeros.  Raw strings keep `\d` a regex escape instead of an invalid
# string escape (SyntaxWarning on Python 3.12+); the octet sub-pattern
# is written once and joined four times.
_IPV4_OCTET = r"([0-9]|[1-9][0-9]|1\d\d|2[0-4]\d|25[0-5])"
REGEX_IPV4 = re.compile(r"^" + r"\.".join([_IPV4_OCTET] * 4) + r"$")

# Absolute http/ftp/https URL.  NOTE: the original pattern contained the
# HTML entity `&amp;` inside its character classes (a copy-paste from an
# HTML page); a literal `&` is what was intended, so urls with query
# strings like `a=1&b=2` now match in full.
REGEX_URL = re.compile(
    r"((http|ftp|https)://[\w\-_]+(\.[\w\-_]+)+"
    r"([\w\-\.,@?^=%&:/~\+#]*[\w\-\@?^=%&/~\+#])?)",
    re.S)

# shortcut: returns a Match (truthy) iff the whole string is an IPv4 quad
IS_IPV4_STR = REGEX_IPV4.match


class ExtractURL(list):
    """ list of all url strings found in `text`, in order of appearance;
        `unique()` iterates them deduplicated
    """
    __slots__ = ('_unique',)  # was ('_unique'): a plain str, not a 1-tuple

    def __init__(self, text):
        # cache for the deduplicated view; None means "not built yet"
        # (the original never initialized it, so unique() always raised
        # AttributeError)
        self._unique = None
        # store the matched url strings, not Match objects: Match objects
        # are all distinct, so frozenset() could never deduplicate them
        self.extend(m.group(0) for m in REGEX_URL.finditer(text))

    def unique(self):
        """ iterate the urls without duplicates (cached on first call) """
        # compare with None: `not self._unique` would rebuild an empty set
        if self._unique is None:
            self._unique = frozenset(self)
        return iter(self._unique)


# concrete binary types that are returned unchanged
_BYTES_TYPES = (bytes, bytearray)

def obj2bytes(obj):
    """ coerce `obj` to bytes when possible

        str input is encoded (ASCII first, then UTF-8); bytes/bytearray
        pass through untouched; any other buffer-protocol object is
        copied out via memoryview.  Raises ValueError for un-encodable
        text and TypeError for unsupported objects.
    """
    if isinstance(obj, str):
        for codec in ('ascii', 'utf-8'):
            try:
                return obj.encode(codec)
            except UnicodeEncodeError:
                pass
        raise ValueError('Need ASCII or UTF-8 characters')
    if isinstance(obj, _BYTES_TYPES):
        return obj
    try:
        return memoryview(obj).tobytes()
    except Exception as e:
        raise TypeError("argument should be a bytes-like object or ASCII "
                        "string, not %r" % obj.__class__.__name__) from e


def recursive_encode(s, level = 10)->str:
    """ recursive encode `s` using base64,
        `level` is depth of recursive, the max value is 32

        Raises ValueError when level > 32 (was a bare `assert`, which
        is stripped under `python -O`).  With level <= 0 a str input is
        now returned unchanged instead of crashing in str(s, 'utf-8').
    """
    if level > 32:
        raise ValueError('level must be <= 32, got %r' % level)
    if level <= 0:
        # base case: hand back text, decoding only if we hold bytes
        return s if isinstance(s, str) else str(s, 'utf-8')
    if not isinstance(s, (bytearray, bytes)):
        s = bytes(s, 'utf-8')
    return recursive_encode(b64encode(s), level - 1)


def recursive_decode(s, level = 10)->str:
    """ recursive decode `s`(encoded by `recursive_encode`) using base64,
        `level` is depth of recursive, the max value is 32

        Raises ValueError when level > 32 (was a bare `assert`, which
        is stripped under `python -O`).  With level <= 0 a str input is
        now returned unchanged instead of crashing in str(s, 'utf-8').
    """
    if level > 32:
        raise ValueError('level must be <= 32, got %r' % level)
    if level <= 0:
        # base case: hand back text, decoding only if we hold bytes
        return s if isinstance(s, str) else str(s, 'utf-8')
    if not isinstance(s, (bytearray, bytes)):
        s = bytes(s, 'utf-8')
    return recursive_decode(b64decode(s), level - 1)



def utf8(s, errors = 'replace'):
    """ decode a bytes-like `s` to str as 'utf-8'

        Generalized: a str input is now returned unchanged (the original
        raised TypeError because str() rejects an encoding for str input).
        `errors` is the usual codec error policy, default 'replace'.
    """
    if isinstance(s, str):
        return s
    return str(s, 'utf-8', errors=errors)


def addhttp(uri):
    """ return `uri` guaranteed to carry the 'http://' prefix """
    if uri.startswith('http://'):
        return uri
    return 'http://' + uri

def subhttp(uri):
    """ return `uri` with a leading 'http://' prefix removed, if present """
    return uri[7:] if uri.startswith('http://') else uri


def addhttpb(uri):
    """ bytes version of addhttp: ensure a b'http://' prefix """
    if uri.startswith(b'http://'):
        return uri
    return b'http://' + uri

def subhttpb(uri):
    """ bytes version of subhttp: drop a leading b'http://' prefix """
    return uri[7:] if uri.startswith(b'http://') else uri


def strip_url_param(url):
    """ return `url` with everything from the first '?' onward removed """
    return url.partition('?')[0]

def url3parts(url):
    """ split `url` to 3 part: raw url (scheme stripped), url without
        params, url domain
    """
    stripped = subhttp(url)
    domain = stripped.split('/', 1)[0]
    return stripped, strip_url_param(stripped), domain


def strip_endl(line):
    """ drop any trailing run of CR/LF characters from `line`
        (rstrip removes every trailing '\\r'/'\\n', not just one pair)
    """
    return line.rstrip('\r\n')

def strip_endlb(line):
    """ bytes version of strip_endl: drop any trailing run of CR/LF bytes """
    return line.rstrip(b'\r\n')


def strict_unquote(s):
    """ unquote url or others strictly, try step:
        first  'utf-8'
        second 'gbk'
        last   'latin-1'
        return None if failed
    """
    for codec in ('utf-8', 'gbk', 'latin-1'):
        try:
            return unquote_plus(s, codec, 'strict')
        except UnicodeDecodeError:
            # strict decoding failed -- fall through to the next codec
            # (was a bare `except:`, which also hid real bugs/KeyboardInterrupt)
            continue
    return None



class URLString(str):
    """ a str wrapper with URL conveniences: parsed components (urlparse),
        DNS resolution (tornado) and simple HTTP verbs (requests)
    """
    __slots__ = ('parsed', 'solver')

    @classmethod
    def config_dns_resolver(cls):
        """ switch tornado to its blocking DNS resolver implementation """
        Resolver.configure('tornado.netutil.BlockingResolver')

    def __new__(cls, s):
        """ new hook """
        return str.__new__(cls, s)

    def __init__(self, s):
        super().__init__()
        self.parsed = urlparse(self)  # ParseResult: scheme, netloc, path...
        self.solver = Resolver()      # tornado DNS resolver instance

    def resolveip(self):
        """ DNS resolve; blocks until the resolver future completes """
        return self.solver.resolve(self.parsed.netloc, port=80).result()

    def HEAD(self, **kargs):
        """ http HEAD method; return response text, or None on any failure """
        try:
            # was `assert self.parsed.scheme` -- asserts vanish under -O
            if not self.parsed.scheme:
                raise ValueError('url has no scheme')
            return httphead(self, **kargs).text
        except Exception:
            if __debug__:
                print_exception('URLString.head')  # was mislabeled '.get'
            return None

    def GET(self, **kargs):
        """ http GET method; return response text, or None on any failure """
        try:
            if not self.parsed.scheme:
                raise ValueError('url has no scheme')
            return httpget(self, **kargs).text
        except Exception:
            if __debug__:
                print_exception('URLString.get')
            return None

    def POST(self, data = None, **kargs):
        """ http POST method; return the Response, or None on any failure

            `data` defaulted to a shared mutable {}; a fresh dict is now
            created per call (backward compatible for all callers).
        """
        try:
            if data is None:
                data = {}
            if not self.parsed.scheme:
                raise ValueError('url has no scheme')
            return httppost(self, data = data, **kargs)
        except Exception:
            if __debug__:
                print_exception('URLString.post')
            return None

    def __getattr__(self, attr):
        """ delegate unknown attributes to the parsed url components """
        return getattr(self.parsed, attr)



def loadjson(filename, objhook = None):
    """ load json from file, return the parsed object (usually dict)

        `objhook` is forwarded as json's object_hook.
    """
    with FileIO(filename) as f:
        # the `encoding` kwarg was deprecated and finally REMOVED from
        # json.loads in Python 3.9 -- passing it raises TypeError there;
        # json detects utf-8/16/32 input by itself
        return json.loads(f.readall(), object_hook = objhook)


def dumpjson(objs, objhook = None, filename = None):
    """ serialize `objs` to a pretty-printed json string and return it;
        when `objhook` is given it is applied to every element first,
        when `filename` is given the string is also written to that file
    """
    payload = list(map(objhook, objs)) if objhook else objs
    s = json.dumps(payload, indent = 4, ensure_ascii = False)
    if filename:
        with FileIO(filename, 'w') as f:
            f.write(s)
    return s



class IncreamentID(int):
    """ auto increament id: iterating yields successive integers starting
        just above the initial value

        The original was broken three ways: `self = 0` in __init__ and
        `self += 1` in __iter__ only rebound locals (int is immutable),
        and __str__ called str(self), i.e. itself, recursing forever.
        Dropping the zero-arg __init__ also allows IncreamentID(n).
    """
    def __iter__(self):
        # track the counter in a plain local; first yield is self + 1
        n = int(self)
        while True:
            n += 1
            yield n
    def __str__(self):
        # delegate to int explicitly -- str(self) would recurse
        return int.__str__(self)


class DummyLock(object):
    """ no-op stand-in for a real lock, for single-threaded code paths;
        supports both the context-manager protocol and acquire/release
    """
    def __init__(self):
        pass
    def __enter__(self):
        return None
    def __exit__(self, exctype, excinst, exctb):
        return None  # falsy: never suppress exceptions
    def acquire(self, *args, **kwargs):
        return None
    def release(self, *args, **kwargs):
        return None


@time_meter(__name__)
def test(url, jsonfile):
    """ smoke test of this module's url / json helpers

        NOTE(review): `url` is accepted but unused -- the target is
        hard-coded below; confirm whether callers expect it honored.
    """
    s = URLString('http://www.baidu.com')
    print('URL : %s\n' % s)

    URLString.config_dns_resolver()
    print(s.resolveip())
    # fixed: URLString defines GET (uppercase); `s.get()` raised
    # AttributeError.  Also guard the slice -- GET returns None on failure.
    body = s.GET()
    print('Response :\n', body[:100] if body else body)

    assert IS_IPV4_STR('123.123.123.123')
    assert not IS_IPV4_STR('234.345.456.678')
    assert not IS_IPV4_STR('1345.456.678')

    try:
        objs = loadjson(jsonfile)
        print(objs[0])
        s = dumpjson(objs, filename = os.path.dirname(jsonfile)+'/test_utilities.json')
        print(len(s))
    except Exception:
        print_exception('test')

    print(ExtractURL(s))
    print('finish')



