#!/usr/bin/python3
# -*- coding: utf-8 -*-

import os, logging, json
import re, hashlib, time, asyncio, base64
from datetime import datetime

from sqlalchemy import func
import aiohttp
from aiohttp_socks import ProxyType, ProxyConnector, ChainProxyConnector

import db

# Name of the session cookie set on login and read back on every request.
COOKIE_NAME = 'awesession'
# Secret salt mixed into the SHA1 cookie signature.
# NOTE(review): identical to the cookie name — presumably this should be an
# independent secret; confirm before relying on it for security.
_COOKIE_KEY = 'awesession'

def setCookie(user, rep, max_age=86400):
    """Attach the session cookie to response *rep*.

    Given a user, sets a signed cookie valid for *max_age* seconds;
    given a falsy user, clears the cookie (max_age=0).
    """
    if not user:
        rep.set_cookie(COOKIE_NAME, '', max_age=0, httponly=True)
        return
    rep.set_cookie(COOKIE_NAME, user2cookie(user, max_age),
                   max_age=max_age, httponly=True)

# Build the signed session cookie:
def user2cookie(user, max_age):
    """Build the cookie string: "<id>-<expires>-<sha1>".

    The SHA1 covers "<id>-<password>-<expires>-<key>" so the cookie cannot
    be forged without knowing the stored password hash and the server key.
    """
    expires = str(int(time.time() + max_age))
    raw = '%s-%s-%s-%s' % (user.id, user.password, expires, _COOKIE_KEY)
    digest = hashlib.sha1(raw.encode('utf-8')).hexdigest()
    return '-'.join([str(user.id), expires, digest])


async def cookie2user(request):
    '''
    Parse cookie and load user if cookie is valid.

    Returns the db.User (with its password masked) or None when the cookie
    is missing, malformed, expired, or the signature does not match.
    '''
    cookie_str = request.cookies.get(COOKIE_NAME)
    if not cookie_str:
        return None
    try:
        parts = cookie_str.split('-')
        if len(parts) != 3:
            return None
        uid, expires, sha1 = parts
        if int(expires) < time.time():
            return None
        session = db.create_session()
        user = session.query(db.User).filter(db.User.id == uid).first()
        if user is None:
            return None
        s = '%s-%s-%s-%s' % (uid, user.password, expires, _COOKIE_KEY)
        if sha1 != hashlib.sha1(s.encode('utf-8')).hexdigest():
            logging.info('invalid sha1')
            return None
        # Detach the row first so masking the password below can never be
        # autoflushed back to the database.
        session.expunge(user)
        # BUGFIX: the original set `user.passwd`, a nonexistent attribute
        # (the model field is `password`, as used in the signature above),
        # so the real password hash was returned unmasked.
        user.password = '******'
        return user
    except Exception as e:
        logging.exception(e)
        return None

class encoder(json.JSONEncoder):
    """JSON encoder for API responses.

    Serializes datetimes as 'YYYY-MM-DD HH:MM:SS' strings and db.User rows
    via their single_to_dict() representation.
    """

    def default(self, obj):
        if isinstance(obj, datetime):
            return obj.strftime("%Y-%m-%d %H:%M:%S")
        if isinstance(obj, db.User):
            return obj.single_to_dict()
        # BUGFIX: the original fell through and returned None, which made
        # json.dumps silently serialize ANY unknown type as null; defer to
        # the base class so an explicit TypeError is raised instead.
        return super().default(obj)

def record(req, rep, handler, params, resp_data):
    """Persist one request/response pair to the Records table for auditing.

    req/rep are the aiohttp request and response objects, handler the
    coroutine that served the request, params the parsed request
    parameters, resp_data the response body text.
    """
    session = db.create_session()
    try:
        session.add(db.Records(
            remote=req.remote,
            reqline='%s %s HTTP/%s.%s' % (req.method, req.path_qs, req.version.major, req.version.minor),
            reqparam=json.dumps(params),
            reqheaders=json.dumps({k: v for k, v in req.headers.items()}),
            status='HTTP/%s.%s %s %s' % (req.version.major, req.version.minor, rep.status, rep.reason),
            repheaders=json.dumps({k: v for k, v in rep.headers.items()}),
            repdata=resp_data,
            handler=handler.__name__))
        session.commit()
    finally:
        # The session holds a pooled DB connection; release it even when
        # json.dumps or the commit raises (the original leaked it).
        session.close()

def filter_fields_dict(item, fields=()):
    """Return a copy of dict *item* keeping only the keys listed in *fields*.

    BUGFIX(best practice): the default was a mutable list ([]); an immutable
    empty tuple behaves identically for the membership test.
    """
    return {key: val for key, val in item.items() if key in fields}

def filter_fields_dict_list(items, fields=[]):
    """Apply the same field whitelist to every dict in *items*."""
    return [{key: val for key, val in entry.items() if key in fields}
            for entry in items]

async def load_page(tableName, index, *, pagesize=12, filter=None, order_by=None):
    """Load one page of rows from the db model named *tableName*.

    *index* is 1-based. Only filter['path'] is honored (equality on the
    table's `path` column); *order_by* maps column name -> ascending flag
    (False means descending). Returns paging metadata plus the rows
    rendered as dicts via single_to_dict().
    """
    # Recreate the historical defaults per call: mutable default arguments
    # are shared across calls and a classic Python pitfall.
    if filter is None:
        filter = {'name': None, 'status': None}
    if order_by is None:
        order_by = {'id': True, 'created_date': True}
    table_ = getattr(db, tableName)
    session = db.create_session()
    criteria = []
    if filter.get('path', None):
        criteria.append(table_.path == filter.get('path', '/'))
    ordering = []
    for column, ascending in order_by.items():
        attr = getattr(table_, column)
        ordering.append(attr if ascending else attr.desc())
    total = session.query(func.count(table_.id)).filter(*criteria).scalar()
    query = session.query(table_).filter(*criteria).order_by(*ordering)
    items = [row.single_to_dict()
             for row in query.offset((index - 1) * pagesize).limit(pagesize)]
    pages = ((total - 1) // pagesize) + 1 if pagesize > 0 else 0
    return {'page': {'total': total, 'pages': pages, 'index': index, 'size': pagesize}, 'items': items}

async def load_items(tableName, filter=None, in_=None, order_by=None):
    """Load all rows of the db model named *tableName* matching the criteria.

    *filter* maps column -> value for equality tests (None values are
    skipped); *in_* maps column -> list of accepted values; *order_by* maps
    column -> ascending flag (False means descending).
    Returns {'items': [row dicts]}.
    """
    # Recreate the historical defaults per call (mutable default arguments
    # are shared across calls — a classic Python pitfall).
    if filter is None:
        filter = {'name': None, 'status': None}
    if in_ is None:
        in_ = {}
    if order_by is None:
        order_by = {'id': True, 'created_date': True}
    table_ = getattr(db, tableName)
    session = db.create_session()
    criteria = []
    # NOTE: the original also special-cased filter['path'] before this loop,
    # which merely produced a duplicate equality clause — removed.
    for key, val in filter.items():
        if val is not None:
            criteria.append(getattr(table_, key) == val)
    for key, val in in_.items():
        if isinstance(val, list):
            criteria.append(getattr(table_, key).in_(val))
    ordering = []
    for column, ascending in order_by.items():
        attr = getattr(table_, column)
        ordering.append(attr if ascending else attr.desc())
    rows = session.query(table_).filter(*criteria).order_by(*ordering)
    return {'items': [row.single_to_dict() for row in rows]}


def save(tableName='', items=None):
    """Insert or update rows of the db model named *tableName*.

    Dicts in *items* carrying an 'id' key update the matching row; the rest
    are inserted as new rows. Caller-supplied created_*/updated_* keys are
    ignored; updated_at/updated_date/updated_stamp are refreshed here on
    every update.
    """
    # BUGFIX: the original guard was `if len(tableName) is None`, which is
    # always False, so an empty name fell through to getattr() and raised;
    # also give getattr a default so the None check actually works.
    if not tableName:
        return
    table_ = getattr(db, tableName, None)
    if table_ is None:
        return
    if not items:
        return
    session = db.create_session()
    updates = {item['id']: item for item in items if 'id' in item}
    # Debug prints replaced with lazy logging.
    logging.debug('save(%s): %s', tableName, items)
    for row in session.query(table_).filter(table_.id.in_(updates.keys())):
        for key, value in updates[row.id].items():
            # Timestamp columns are owned by this function, never the caller.
            if key.startswith('updated_') or key.startswith('created_'):
                continue
            setattr(row, key, value)
        row.updated_at = datetime.now()
        row.updated_date = row.updated_at.strftime('%Y%m%d')
        row.updated_stamp = row.updated_at.timestamp()
    for obj in items:
        if 'id' in obj:
            continue
        row = table_()
        for key, value in obj.items():
            if key.startswith('updated_') or key.startswith('created_'):
                continue
            setattr(row, key, value)
        session.add(row)
    session.commit()


def delete(tableName='', ids=None):
    """Delete rows of the db model named *tableName* whose id is in *ids*.

    Rows that look like directories (content_type == 'd') first have their
    children — rows whose path equals '<path>/<name>' — deleted recursively.
    """
    # BUGFIX: `if len(tableName) is None` is always False; use a real
    # emptiness check, and give getattr a default so the None check works.
    if not tableName:
        return
    table_ = getattr(db, tableName, None)
    if table_ is None:
        return
    if not ids:
        return
    session = db.create_session()
    for item in session.query(table_).filter(table_.id.in_(ids)):
        if hasattr(item, 'content_type') and item.content_type == 'd':
            # BUGFIX: the original passed a third `session` argument to this
            # two-argument function AND called filter_by() with a positional
            # expression — both raised TypeError at runtime.
            child_path = '%s/%s' % (item.path, item.name)
            children = session.query(table_).filter(table_.path == child_path)
            delete(tableName, [child.id for child in children])
        session.delete(item)
    session.commit()


def _download(url, *, filename=None, timeout=300, proxy='', headers=None, chunk_size=1024, dump=True):
    """Synchronous wrapper around download(): run it on a private event loop.

    BUGFIX: the original grabbed the thread's loop via get_event_loop()
    and then closed it, so a second call (or any later asyncio use on the
    same thread) failed on a closed loop. asyncio.run() creates and
    disposes of its own loop instead.
    """
    return asyncio.run(
        download(url, filename=filename, timeout=timeout, proxy=proxy,
                 headers=headers or {}, chunk_size=chunk_size, dump=dump)
    )


async def download(url, *, filename=None, timeout=120, proxy='', headers=None, chunk_size=1024, dump=True):
    """Fetch *url* with aiohttp, optionally through a (socks) proxy URL.

    With *filename*, the body is streamed into that file, supporting HTTP
    206 resume via Content-Range; without it, the decoded body text is
    returned in place of the filename. Returns (filename_or_text,
    {'size': bytes_received, 'duration': seconds_to_first_response}),
    where duration == -1 marks connection errors/timeouts.
    *timeout* may be seconds (int) or a dict of aiohttp.ClientTimeout kwargs.
    """
    if headers is None:
        headers = {}
    # Normalize the timeout argument into aiohttp.ClientTimeout kwargs.
    timeout_ = {'total': 60 * 2, 'connect': None, 'sock_connect': None, 'sock_read': None}
    if isinstance(timeout, int):
        timeout_['total'] = timeout
    elif isinstance(timeout, dict):
        # BUGFIX: the original did dict(timeout, **timeout) — merging the
        # caller's dict with itself and discarding the defaults.
        timeout_ = dict(timeout_, **timeout)
    timeout = aiohttp.ClientTimeout(**timeout_)

    connector = aiohttp.TCPConnector(verify_ssl=False)
    logging.info('[+]proxy: %s' % proxy)
    if proxy and len(proxy) > 0:
        connector = ProxyConnector.from_url(proxy, verify_ssl=False)
    logging.info('[get]%s' % url)
    result = {'size': 0, 'duration': 0}
    start = time.time()
    async with aiohttp.ClientSession(connector=connector, timeout=timeout, trust_env=True) as session:
        try:
            async with session.get(url, headers=headers) as resp:
                result['duration'] = time.time() - start
                curre_size = 0
                total_size = 0
                if resp.status == 206:
                    # Content-Range: bytes <first>-<last>/<total>
                    content_range = resp.headers.get('Content-Range', 0)
                    if content_range:
                        exp = re.match(r'bytes (\d*)-(\d*)/(\d*)', content_range)
                        if exp:
                            # BUGFIX: resume from the range *start* (group 1)
                            # and size the file to the full entity length
                            # (group 3); the original used the range end for
                            # both, corrupting resumed downloads.
                            curre_size = int(exp.group(1))
                            total_size = int(exp.group(3))
                    else:
                        total_size = int(resp.headers.get('Content-Length', 0))
                if resp.status == 200:
                    total_size = int(resp.headers.get('Content-Length', 0))
                if filename is not None:
                    if not os.path.exists(filename):
                        # Pre-size the file so resumed chunks can seek into
                        # place. (BUGFIX: the original truncated to
                        # total_size + 1, leaving a stray trailing byte.)
                        with open(filename, "wb") as fp:
                            fp.truncate(total_size)
                    with open(filename, "r+b") as fp:
                        fp.seek(curre_size, 0)
                        while True:
                            chunk = await resp.content.read(chunk_size)
                            if not chunk:
                                result['size'] = curre_size
                                break
                            fp.write(chunk)
                            curre_size += len(chunk)
                else:
                    # No target file: accumulate the decoded body and return
                    # it in the filename slot (historical contract). Joining
                    # chunks avoids the original's quadratic str +=.
                    parts = []
                    while True:
                        chunk = await resp.content.read(chunk_size)
                        if not chunk:
                            break
                        parts.append(chunk.decode('utf8', errors='ignore'))
                    filename = ''.join(parts)
                    result['size'] = len(filename)
        except aiohttp.client_exceptions.ClientConnectorError as e:
            # Log the exception itself: ClientConnectorError defines no
            # `.message`, so the original access could itself raise.
            logging.error(f'[ClientConnectorError]: {e}')
            result['duration'] = -1
        except aiohttp.client_exceptions.ServerDisconnectedError as e:
            logging.error(f'[ServerDisconnectedError]: {e.message}')
            result['duration'] = -1
        except asyncio.TimeoutError as e:
            logging.error(f'[TimeoutError]: {e}')
            result['duration'] = -1
        except ConnectionResetError as e:
            logging.error(f'[ConnectionResetError]: {e}')
            result['duration'] = -1
    logging.info('download %s -> %s(%sb)' % (url, filename[:32] if filename else filename, result))
    return filename, result


def base64_decode(text=b''):
    """Decode base64 *text* (bytes) to a UTF-8 string, ignoring bad bytes."""
    decoded = base64.decodebytes(text)
    return decoded.decode("utf-8", errors='ignore')

def base64_decode_file(filename=''):
    """Read *filename* and return its base64-decoded text ('' if missing)."""
    if not os.path.exists(filename):
        return ''
    with open(filename, 'rb') as src:
        return base64_decode(src.read())

def save_v2ray(server):
    """Upsert a db.V2ray row from a parsed v2ray/vmess server dict.

    The row is located by the vmess 'id' field; keys missing or empty in
    *server* leave the corresponding column untouched.
    """
    if not isinstance(server, dict):
        return
    # (column name on db.V2ray, key in the incoming server dict)
    # NOTE(review): 'host' maps to '_', so it is never populated from the
    # source dict — looks suspicious; confirm against the feed format.
    key_relate = (('url', 'url'), ('protocol', 'protocol'),
                  ('subscribe', 'sub'), ('address', 'add'),
                  ('port', 'port'), ('password', 'password'),
                  ('key', 'id'), ('aid', 'aid'),
                  ('security', 'scy'), ('remarks', 'ps'),
                  ('network', 'net'), ('type', 'type'),
                  ('host', '_'), ('tls', 'tls'))
    session = db.create_session()
    v2ray = session.query(db.V2ray).filter(db.V2ray.key == server.get('id', '')).first()
    if not v2ray:
        v2ray = db.V2ray(SNI='', allowInsecure=False)
    for column, source_key in key_relate:
        value = server.get(source_key, '')
        if value != '':
            setattr(v2ray, column, value)
    # A freshly created row has no primary key yet; stage it for insert.
    if v2ray.id is None:
        session.add(v2ray)
    session.commit()

if __name__ == '__main__':
    import argparse

    # Minimal CLI: download a URL (optionally through a proxy) to a file.
    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--download", help="download <url>",
                        action="store_true", dest="is_download", default=False)
    parser.add_argument("--text", help="--text <text>")
    parser.add_argument("--filename", help="--filename <filename>")
    parser.add_argument("--url", help="--url <vemss|ss>")
    parser.add_argument("--proxy", help="--proxy <proxy url>")
    args = parser.parse_args()

    if args.is_download:
        if args.url is None:
            parser.print_help()
            exit(-1)
        name, info = _download(args.url, filename=args.filename, proxy=args.proxy)
        print(f'{name}\n{info}')
