# -*- coding: utf-8 -*-
import re
import json
import time
import copy
import requests
import tldextract
import urllib.parse
import jsonpatch
from flask import current_app as app
from app.db.models.asset_tasks import AssetTasks
from app.db.models.jobs import Job
from app.db.models.siteportraits import SitePortRaits, SitePortRaitTask
from app.celery import logger
from app.config.settings import CLICKHOUSE_CONFIG, SITE_PORTRAIT_INTERVAL
from app.libs.utils import schedule_vul, handle_custom_urls_data
# Wildcard sentinel used as both a node key and a node value in the pattern
# tree; built from control characters so it cannot collide with real URL tokens.
ANY = "\x01*\x02"
# Minimum number of similar sibling routes (per category) before tree_culling
# collapses them into a single ANY wildcard node.
MERGE_THRESHOLD = {
    "api": 8,
    "static": 3,
    "staticHtml": 8,
}


def ck_query(scheme, host, port):
    """Fetch distinct recent access-log rows for one site from ClickHouse.

    Returns the raw HTTP response split into lines (tab-separated columns:
    scheme, req_method, host, req_uri, server_listen_port, content_type).
    The response ends with a newline, so the last element is an empty
    string — callers account for that. Returns [] on transport failure.
    """
    # Only look at traffic inside the configured window (hours -> seconds).
    start_time = int(time.time()) - 3600 * SITE_PORTRAIT_INTERVAL
    # SECURITY(review): scheme/host come from asset targets and are
    # interpolated into the SQL unescaped — acceptable only while targets
    # are trusted; consider ClickHouse parameterized queries otherwise.
    query = (
        "SELECT DISTINCT scheme, req_method, host, req_uri, server_listen_port, content_type "
        "FROM access "
        f"WHERE (start_time > '{start_time}') AND (scheme = '{scheme}') AND (host = '{host}') "
        f"AND (server_listen_port = {port}) AND (resp_code >= 200) AND (resp_code < 300) "
        "LIMIT 1000"
    )
    try:
        # BUGFIX: pass user/password/query via `params` so requests
        # URL-encodes them (the SQL contains spaces, quotes and '&' which
        # previously went into the URL raw); also add a timeout so a hung
        # ClickHouse cannot stall the worker forever.
        ret = requests.get(
            CLICKHOUSE_CONFIG['api_url'],
            params={
                "user": CLICKHOUSE_CONFIG['user'],
                "password": CLICKHOUSE_CONFIG['password'],
                "query": query,
            },
            verify=False,
            timeout=60,
        )
    except requests.RequestException:
        # Best-effort query: treat any transport failure as "no traffic".
        return []
    return ret.text.split("\n")


def build_url_pattern_tree(datas, parse_url):
    """Turn raw ClickHouse access-log rows into a culled URL pattern tree.

    Args:
        datas: iterable of tab-separated rows
            (scheme, method, host, uri, port, content_type).
        parse_url: urllib.parse result of the scan target; its ``port``
            decides whether an explicit port is kept in rebuilt URLs.

    Returns:
        The merged and culled pattern tree (dict).
    """
    tree = {}
    rows = []
    for row in datas:
        try:
            scheme, method, host, url, port, content_type = row.split("\t")
        except ValueError:
            # Empty/malformed lines don't unpack into exactly 6 fields.
            continue
        if not url:
            continue
        # Collapse doubled slashes and drop the trailing one so that
        # '/a//b/' and '/a/b' tokenize identically.
        url = url.replace('//', '/').rstrip('/')
        # Only keep an explicit port when the scan target itself has one.
        if parse_url.port:
            url_data = url_tokenize(f"{scheme}://{host}:{port}{url}")
        else:
            url_data = url_tokenize(f"{scheme}://{host}{url}")
        url_data.update({
            "method": method.lower(),
            "contentType": content_type.lower(),
        })
        url_data["isStaticFileReq"] = is_static_file_req(url_data)
        rows.append(url_data)
    # CLEANUP: the original had two sequential post-loop branches
    # (len >= 1000, then len >= 1) that could never both fire — a vestige
    # of in-loop batching. A single flush is exactly equivalent.
    if rows:
        merge_tree(tree, skip_domain(build_req_url_tree(rows)))
    tree_culling(tree)
    return tree


def build_req_url_tree(req_urls):
    """Build a nested pattern tree (domain -> subDomain* -> path*) from the
    tokenized request dicts produced by ``url_tokenize``.

    Every node records its urlTypeMap ('api'/'static') and a patternMap of
    structural regexes; the deepest node of each request becomes a leaf
    carrying contentTypeMap, example URLs, query-parameter info and the
    HTTP methods seen.
    """
    tree = {}
    # One shared creation timestamp (milliseconds) for this whole batch.
    now_ts = int(time.time() * 1000)

    for url in req_urls:
        url_type = 'static' if url.get('isStaticFileReq') else 'api'

        # Root level: one node per registered domain.
        if url['domain'] not in tree:
            tree[url['domain']] = {
                'type': 'domain',
                'key': 'domain',
                'value': url['domain'],
                'firstTime': now_ts,
                'lastTime': now_ts,
                'children': {},
                'urlTypeMap': {},
            }
        tree[url['domain']]['urlTypeMap'][url_type] = True
        last_path = tree[url['domain']]['children']

        # Descend/create one node per subdomain label (subDomain_0, ...).
        for key, value in url['subDomainMap'].items():

            if value not in last_path:
                last_path[value] = {
                    'key': key,
                    'value': value,
                    'type': 'subDomain',
                    'firstTime': now_ts,
                    'lastTime': now_ts,
                    'children': {},
                    'urlTypeMap': {},
                    'patternMap': {},
                }

            last_path[value]['urlTypeMap'][url_type] = True
            last_path[value]['patternMap'][pattern_builder(value)] = True
            last_path = last_path[value]
        # Then one node per path segment (path_0, path_1, ...).
        # NOTE(review): when subDomainMap is empty, last_path is still the
        # domain's children dict here, so a literal 'children' bucket key is
        # created inside it — skip_domain appears to compensate downstream;
        # confirm this is intended.
        for key, value in url['pathMap'].items():

            if 'children' not in last_path:
                last_path['children'] = {}

            if value not in last_path['children']:
                last_path['children'][value] = {
                    'key': key,
                    'value': value,
                    'type': 'path',
                    'firstTime': now_ts,
                    'lastTime': now_ts,
                    'urlTypeMap': {},
                    'patternMap': {},
                }

            last_path['children'][value]['urlTypeMap'][url_type] = True
            last_path['children'][value]['patternMap'][pattern_builder(value)] = True
            last_path = last_path['children'][value]
        # The deepest node visited represents the full request path.
        last_path['isLeaf'] = True
        last_path['contentTypeMap'] = {url['contentType'].lower(): True}
        last_path['example'] = [url['example']]

        # Leaves keep no empty 'children' key.
        if not last_path.get('children'):
            last_path.pop('children', "")

        # Record query parameters; multi-valued params collapse to ANY.
        for key, value in url['searchParams'].items():
            last_path['query'] = last_path.get('query', {})
            if value:
                last_path['query'][key] = {
                    'value': ANY if len(value) > 1 else value[0],
                    'firstTime': now_ts,
                    'lastTime': now_ts,
                    'patternMap': {pattern_builder(key): True},
                    'example': [url['example']],
                }

        last_path['methods'] = last_path.get('methods', {})
        last_path['methods'][url['method']] = True
    return tree


def escape_reg_exp(string):
    """re.sub callback: return the matched text with regex metacharacters escaped."""
    matched_text = string.group()
    return re.escape(matched_text)


def regex_builder(s):
    """re.sub callback: replace an alphanumeric run with a character-class
    token of the same length, e.g. 'abc' -> '[\\p{L}]{3}', '12' -> '[\\p{N}]{2}'.

    The emitted \\p{...} classes are not Python-re syntax; they are the
    portable pattern notation this module stores and compares.
    """
    token = s.group()
    classes = ''
    # Digits contribute \p{N}, letters contribute \p{L}; a mixed run gets both.
    if any(ch.isnumeric() for ch in token):
        classes += r'\p{N}'
    if any(ch.isalpha() for ch in token):
        classes += r'\p{L}'
    return f'[{classes}]{{{len(token)}}}'


def pattern_builder(string):
    """Build the structural pattern of *string*: non-alphanumeric characters
    (except ':') are regex-escaped and alphanumeric runs become
    length-annotated classes via ``regex_builder`` (e.g. 'v2' -> '[\\p{N}\\p{L}]{2}').
    """
    # BUGFIX: both character classes previously read 'a-zA-Z1-9', silently
    # excluding the digit '0', so a value like 'a0b' split into two runs
    # instead of one three-character alphanumeric token.
    escaped_symbols = re.sub(r'([^:a-zA-Z0-9])', escape_reg_exp, string)
    return re.sub(r'([a-zA-Z0-9]+)', regex_builder, escaped_symbols, flags=re.IGNORECASE)


def gb18030_decoder():
    """Return the codec name used for GB18030-encoded text."""
    codec_name = "gb18030"
    return codec_name


def str2ab(string):
    """Convert a text string into a mutable UTF-8 byte buffer."""
    return bytearray(string.encode('utf-8'))


def url_tokenize(url_str):
    """Break a URL into the token maps consumed by the pattern-tree builder.

    Returns a dict with: example (original URL), domain/subdomain/pathname,
    subDomainMap (subDomain_i -> label), pathArr and pathMap (path_i ->
    segment), optional urlCookieMap (';'-style path parameters), optional
    fileExtension, and searchParams (parsed query string).
    """
    parsed = urllib.parse.urlparse(url_str)
    subdomain, domain, suffix = tldextract.extract(url_str)

    url_data = {
        "example": url_str,
        "domain": f"{domain}.{suffix}",
        "subdomain": subdomain,
        "pathname": parsed.path,
    }

    sub_parts = [part for part in subdomain.split('.') if part]
    url_data["subDomainMap"] = {
        f"subDomain_{idx}": part for idx, part in enumerate(sub_parts)
    }

    path_arr = [urllib.parse.unquote(seg) for seg in parsed.path.split('/') if seg]
    url_data["pathArr"] = path_arr

    # Path parameters (…/name;k=v): strip them off the last segment and keep
    # them parsed separately.
    if path_arr and ';' in path_arr[-1]:
        head, _, tail = path_arr[-1].partition(';')
        path_arr[-1] = head
        url_data["urlCookieMap"] = urllib.parse.parse_qs(tail)

    # File extension of the (already stripped) last segment, if any.
    if path_arr and '.' in path_arr[-1]:
        url_data["fileExtension"] = path_arr[-1].rsplit('.', 1)[-1].lower()

    url_data["pathMap"] = {f"path_{i}": seg for i, seg in enumerate(path_arr)}
    url_data["searchParams"] = urllib.parse.parse_qs(parsed.query)
    return url_data


def is_static_file_req(url_obj):
    """Heuristically decide whether a tokenized request is a static-file fetch.

    Returns False ("API-like") when the request uses a write method, targets
    a server-side framework extension, contains 'api/' in the path, or has
    path parameters without a file extension; otherwise True.
    """
    framework_extensions = {'php', 'jsp', 'asp', 'aspx', 'do'}
    write_methods = {'post', 'put', 'delete'}

    # Bare host (no path segments) is treated as static.
    if not url_obj["pathArr"]:
        return True

    if url_obj["method"] in write_methods:
        return False

    if url_obj.get("fileExtension") in framework_extensions:
        return False

    if 'api/' in url_obj["pathname"]:
        return False

    # Path parameters (';k=v') without a file extension look dynamic.
    if not url_obj.get("fileExtension") and url_obj.get("urlCookieMap"):
        return False

    return True


def tree_culling(tree):
    """Collapse similar sibling routes in the pattern tree into ANY nodes.

    Processes levels of the tree via an explicit queue; at each level,
    sibling routes are bucketed (static leaves by content type, HTML leaves
    and API-ish routes by structural regex pattern) and a bucket is merged
    into a single ANY-keyed wildcard node once it reaches MERGE_THRESHOLD
    or already "fits" an existing ANY sibling.

    NOTE(review): ``tree`` is rebound inside the loop, so the return value
    is the last single-child subtree visited, not the root. Callers in this
    file rely on the in-place mutation and ignore the return value.
    """
    queue = [tree.get('children', {})]
    while queue:
        sub_trees = queue.pop(0) or {}

        tree_keys = list(sub_trees.keys())
        if not tree_keys:
            continue
        # A lone child has no siblings to merge with; just descend into it.
        if len(tree_keys) <= 1:
            tree = sub_trees.get(tree_keys[0], {})
            if tree.get('children'):
                queue.insert(0, tree['children'])
            continue

        any_tree = sub_trees.get(ANY)
        key_len_map = {
            'static': {},
            'api': {},
            'staticHtml': {}
        }
        for key in tree_keys:
            if key == ANY:
                continue
            route = sub_trees[key]
            # NOTE(review): assumes leaf contentTypeMap is non-empty (set by
            # build_req_url_tree); next() would raise StopIteration otherwise.
            if route.get('isLeaf') and not route.get('urlTypeMap', {}).get('api'):
                if next(iter(route.get('contentTypeMap', {}))) == 'html':
                    pattern_key = pattern_builder(route.get('value', ''))
                    key_len_map['staticHtml'].setdefault(pattern_key, []).append(route)
                else:
                    content_type = next(iter(route.get('contentTypeMap', {})))
                    key_len_map['static'].setdefault(content_type, []).append(route)
            else:
                pattern_key = pattern_builder(route.get('value', ''))
                key_len_map['api'].setdefault(pattern_key, []).append(route)

        # Many distinct HTML patterns: drop length quantifiers so
        # near-identical patterns fall into the same bucket.
        # NOTE(review): if two patterns strip to the same key, the later
        # routes list overwrites the earlier one here — confirm intended.
        if len(key_len_map['staticHtml']) >= MERGE_THRESHOLD['staticHtml'] * 2:
            key_len_map['staticHtml'] = {
                strip_pattern_length(pattern): routes
                for pattern, routes in key_len_map['staticHtml'].items()
            }

        # Same idea for "long" API patterns (more than 3 \p-class segments).
        if len([p for p in key_len_map['api'] if len(p.split(r'\p')) > 3]) >= MERGE_THRESHOLD['api'] * 3:
            key_len_map['api'] = {
                strip_pattern_length(pattern) if len(pattern.split(r'\p')) > 3 else pattern: routes
                for pattern, routes in key_len_map['api'].items()
            }

        for key, threshold in MERGE_THRESHOLD.items():
            for routes in key_len_map[key].values():
                # Merge when the bucket reaches the threshold, or when every
                # route already matches the existing ANY sibling (by pattern
                # for api/html, by content type for static).
                # NOTE(review): the first `all(...)` tests
                # patternMap.get(first_key) — i.e. the value True — for
                # membership in any_tree's patternMap; this looks like it
                # was meant to test the key itself. Confirm before relying
                # on this branch.
                if (any_tree and (
                        all(
                            x.get('patternMap', {}).get(next(iter(x.get('patternMap', {})))) in any_tree.get('patternMap', {})
                            or any(
                                new_pattern in any_tree.get('patternMap', {})
                                for new_pattern in any_tree.get('patternMap', {}).keys()
                                if new_pattern in x.get('value', '')
                            )
                            for x in routes
                        )
                        if key != 'static'
                        else all(
                            next(iter(x.get('contentTypeMap', {}))) in any_tree.get('contentTypeMap', {})
                            for x in routes
                        )
                    )) or len(routes) >= threshold:
                    for x in routes:
                        sub_trees.pop(x.get('value', ''), None)

                    if not any_tree:
                        any_tree = merge_tree(*routes)
                    else:
                        any_tree = merge_tree(any_tree, *routes)

                    any_tree['value'] = ANY
                    sub_trees[ANY] = any_tree

                    if any_tree.get('children'):
                        queue.insert(0, any_tree['children'])
                else:
                    # Bucket kept as-is; still descend into each route.
                    for x in routes:
                        if x.get('children'):
                            queue.insert(0, x['children'])

    return tree


def skip_domain(tree):
    """Skip the domain/subDomain prefix of a freshly built URL tree.

    Follows first children downward and returns the deepest node sitting
    just above the first 'path'-typed child (or {} for an empty tree).
    """
    node = next(iter(tree.values()), {})
    while True:
        children = node.get('children') or {}
        child = next(iter(children.values()), None)
        if not child or child.get('type') == 'path':
            return node
        node = child


def merge_tree(*args):
    """Merge one or more pattern-tree nodes into the first argument, in place.

    Nodes of matching type are unified: differing values collapse to ANY;
    urlTypeMap/contentTypeMap/patternMap are unioned (patterns re-combined);
    example lists are capped and deduplicated; firstTime/lastTime widen to
    the union range. Children (or query params) are merged recursively via
    an explicit work queue. Returns the (mutated) first argument.
    """
    first = args[0]
    # Pair the accumulator with each subsequent non-empty tree.
    queue = [[first, next_item] for next_item in args[1:] if next_item]

    while queue:
        a, b = queue.pop(0)

        # By default (e.g. top-level dicts of nodes), a/b themselves are the
        # child maps to walk below.
        a_child = a
        b_child = b

        if a.get('type') and a.get('value') and a['type'] == b.get('type'):
            if a['value'] != b['value']:
                a['value'] = ANY
            if b.get('urlTypeMap'):
                a['urlTypeMap'] = {**a.get('urlTypeMap', {}), **b['urlTypeMap']}
            if b.get('contentTypeMap'):
                a['contentTypeMap'] = {**a.get('contentTypeMap', {}), **b['contentTypeMap']}
            # NOTE(review): the [:6] cap is applied before set() dedupes, so
            # fewer than 6 distinct examples may survive — confirm intended.
            if b.get('example'):
                a['example'] = list(set((b.get('example', []) + a.get('example', []))[:6]))
            if b.get('patternMap'):
                a['patternMap'] = {**a.get('patternMap', {}), **b['patternMap']}
                a['patternMap'] = combine_similar_regex_pattern(a['patternMap'])
            # Widen the observation window to cover both nodes.
            if b.get('firstTime'):
                a['firstTime'] = a.get('firstTime') or b['firstTime']
                if a['firstTime'] > b['firstTime']:
                    a['firstTime'] = b['firstTime']
            if b.get('lastTime'):
                a['lastTime'] = a.get('lastTime') or b['lastTime']
                if a['lastTime'] < b['lastTime']:
                    a['lastTime'] = b['lastTime']

            # Recurse into children if present, else into query params.
            if b.get('children'):
                if not a.get('children'):
                    a['children'] = {}
                a_child = a['children']
                b_child = b['children']
            elif b.get('query'):
                if not a.get('query'):
                    a['query'] = {}
                a_child = a['query']
                b_child = b['query']
            else:
                a_child = None
                b_child = None

        if b_child and isinstance(b_child, dict):
            # Reversed + insert(0, ...) keeps child processing in original order.
            b_keys = list(b_child.keys())
            b_keys.reverse()
            for key in b_keys:
                val = b_child[key]

                if key not in a_child:
                    # New branch: adopt b's node by reference.
                    a_child[key] = val
                elif isinstance(val, dict):
                    # Same key, different value: promote a's node to the ANY
                    # slot before merging b's node into it.
                    # NOTE(review): an existing a_child[ANY] would be
                    # overwritten here — confirm intended.
                    if a_child[key].get('value') != val.get('value'):
                        a_val = a_child[key]
                        a_val['value'] = ANY
                        del a_child[key]
                        a_child[ANY] = a_val
                        key = ANY
                    queue.insert(0, [a_child[key], val])

    return first


def combine_similar_regex_pattern(pattern_map):
    """Collapse patterns that differ only in their length quantifiers.

    Patterns are bucketed by their length-stripped form; each bucket with
    more than one member is merged into a single pattern whose {min,max}
    quantifiers span the whole bucket. Returns a new pattern -> True map.
    """
    pattern_group = {}
    for pattern in pattern_map:
        key = strip_pattern_length(pattern)
        pattern_group.setdefault(key, []).append(pattern)

    result = {}
    for patterns in pattern_group.values():
        if len(patterns) == 1:
            result[patterns[0]] = True
        else:
            # Tokenize each pattern into (class, start, end, delimiter)
            # items. Bucketing by the stripped form guarantees every member
            # has the same token sequence, so positional indexing is safe.
            pattern_length = []
            for pattern in patterns:
                matches = list(re.finditer(r"(\[(?:\\p{L}|\\p{N}|\\p{N}\\p{L})\])(?:{(\d+)(?:,(\d+))?})(?:([^:\\p{N}\\p{L}[]+))?", pattern))
                pattern_length.append([{"p": match.group(1), "start": int(match.group(2)), "end": int(match.group(3) or match.group(2)), "delimiter": match.group(4)} for match in matches])

            merged_pattern = []
            for idx in range(len(pattern_length[0])):
                # Widen this token's {start,end} range across all members.
                merged_item = pattern_length[0][idx].copy()
                for pattern in pattern_length[1:]:
                    item = pattern[idx]
                    if merged_item["start"] > item["end"]:
                        merged_item["start"] = item["end"]
                    if merged_item["end"] < item["start"]:
                        merged_item["end"] = item["start"]
                    if merged_item["start"] > item["start"]:
                        merged_item["start"] = item["start"]
                    if merged_item["end"] < item["end"]:
                        merged_item["end"] = item["end"]

                # Re-emit the token with the merged quantifier ({n} or {n,m}).
                ret = merged_item["p"]
                if merged_item["start"] == merged_item["end"]:
                    ret += f"{{{merged_item['start']}}}"
                else:
                    ret += f"{{{merged_item['start']},{merged_item['end']}}}"

                merged_pattern.append(ret + (merged_item["delimiter"] or ""))

            result["".join(merged_pattern)] = True

    return result


def strip_pattern_length(pattern):
    """Remove length quantifiers from class tokens, e.g. r'[\\p{L}]{2,3}' -> r'[\\p{L}]'."""
    quantified_class = r'(\[(?:\\p{L}|\\p{N}|\\p{N}\\p{L})\])(?:{\d+(?:,\d+)?})'
    return re.sub(quantified_class, r'\1', pattern)


def path_or(default, path, obj):
    """Safely walk *obj* along the keys/indices in *path*.

    Returns *default* as soon as any step is missing, out of range, or the
    current value is not subscriptable.
    """
    current = obj
    for step in path:
        try:
            current = current[step]
        except (KeyError, TypeError, IndexError):
            return default
    return current


def get_changed_urls(patches, url_tree):
    """Map JSON-patch operations back to example URLs of the affected nodes.

    Only query-level changes, API routes, and HTML pages are reported;
    returns a deduplicated list of example URLs.
    """
    changed = set()
    for patch in patches:
        field_path = patch['path'][1:].split('/')

        # Patch paths alternate container/key segments; scan backwards in
        # steps of two looking for a 'query' container, and if found point
        # the path at the node that owns it.
        query_idx = 0
        i = len(field_path) - 2
        while i >= 0:
            if field_path[i] == 'query':
                query_idx = i
                break
            i -= 2
        if query_idx:
            field_path = field_path[:query_idx]

        node = path_or({}, field_path, url_tree)
        example = path_or('', ['example', 0], node)
        if not isinstance(node, dict):
            continue
        relevant = (
            query_idx > 0
            or node.get('urlTypeMap', {}).get('api')
            or node.get('contentTypeMap', {}).get('text/html')
        )
        if relevant and example:
            changed.add(example)

    return list(changed)


def update_task(asset_task, change_urls):
    """Point the task's vuln collection at the changed URLs and schedule an
    immediate re-scan job for them."""
    asset_task.vulSettings.collect.includeUrl = change_urls
    job = Job.objects.filter(id=asset_task.jobId).first()
    # NOTE(review): when the backing Job is gone, the includeUrl change is
    # also not persisted (save() below is skipped) — confirm intended.
    if not job:
        return
    schedule_vul(
        job, asset_task.vulSettings,
        immediate_exec=True,
        addition={},
        prelogin=asset_task.prelogin.to_dict(),
        custom_monitor_urls=handle_custom_urls_data(asset_task.customMonitorPage.to_dict()),
        asset_task=asset_task,
        is_manual=True,
        alert_setting=asset_task.alertSettings,
    )
    asset_task.save()


def omit_nested(data, field):
    """Recursively drop dict keys listed in *field*.

    Non-dict values (including lists) are returned unchanged — the recursion
    does not descend into list elements.
    """
    if not isinstance(data, dict):
        return data
    return {
        key: omit_nested(value, field)
        for key, value in data.items()
        if key not in field
    }


def site_portrait_task(target):
    """Rebuild the URL-pattern portrait of one target and react to changes.

    Queries recent traffic from ClickHouse, builds the current pattern
    tree, creates baseline portraits for uids without one, and for existing
    portraits: merges in the new tree, diffs old vs. new (ignoring volatile
    fields), records a change task, and reschedules vuln scans on the URLs
    that changed.
    """
    url = urllib.parse.urlparse(target)
    port = url.port
    # Fall back to the scheme's default port for the ClickHouse filter.
    if not port:
        port = 443 if url.scheme == "https" else 80
    ret = ck_query(url.scheme, url.netloc, port)
    url_pattern_tree = build_url_pattern_tree(ret, url)
    # "- 1" compensates for the empty trailing line of the response split.
    logger.info(f"Target {target} get {len(ret) - 1} urls, build pattern tree done")
    uids = SitePortRaits.objects.filter(target=target).distinct("uid")
    query = {
        "enableMonitor": True,
        "taskGroupType": "web_security",
        "vulSettings__enableSitePortraitTriggerMonitor": True,
        "vulSettings__enable": True,
        "targetUrl": target
    }
    asset_uids = AssetTasks.objects.filter(**query).distinct("uid")
    # Monitored uids without a portrait yet: store the current tree as baseline.
    for uid in set(asset_uids).difference(set(uids)):
        logger.info(f"Create site portrait uid:{uid} target:{target}")
        data = {
            "uid": uid,
            "target": target,
            "portrait": json.dumps(url_pattern_tree),
            "version": 1,
            "is_changed": False
        }
        SitePortRaits(**data).save()
    # Uids with an existing portrait: merge and diff.
    for uid in set(asset_uids).intersection(set(uids)):
        site_portrait = SitePortRaits.objects.filter(target=target, uid=uid).first()
        prev_site_portrait = json.loads(site_portrait.portrait)
        # Merge into a deep copy so prev_site_portrait stays pristine for the diff.
        # NOTE(review): merge_tree may adopt nodes of url_pattern_tree by
        # reference, so later mutation could leak across uids — confirm.
        tmp = copy.deepcopy(prev_site_portrait)
        current_site_portrait = merge_tree(tmp, url_pattern_tree)
        # Diff current -> previous so patch paths address nodes that exist in
        # the current tree; volatile bookkeeping fields are excluded.
        patches = jsonpatch.JsonPatch.from_diff(
            omit_nested(current_site_portrait, ["patternMap", 'example', 'lastTime', 'firstTime']),
            omit_nested(prev_site_portrait, ["patternMap", 'example', 'lastTime', 'firstTime']),
        ).patch
        site_portrait.version += 1
        if patches:
            logger.info(f"Changed site portrait uid:{uid} target:{target}")
            data = {
                "uid": uid,
                "target": target,
                "version": site_portrait.version,
                "is_changed": True,
                "patches": json.dumps(patches)
            }
            task = SitePortRaitTask(**data)
            task.save()
        # NOTE(review): is_changed is set True even when there are no
        # patches — confirm this is intended.
        site_portrait.is_changed = True
        site_portrait.portrait = json.dumps(current_site_portrait)
        site_portrait.save()
        change_urls = get_changed_urls(patches, current_site_portrait)
        if change_urls:
            query = {
                "uid": uid,
                "targetUrl": target,
                "enableMonitor": True,
                "taskGroupType": "web_security",
                "vulSettings__enableSitePortraitTriggerMonitor": True,
                "vulSettings__enable": True,
            }
            for asset_task in AssetTasks.objects.filter(**query):
                logger.info(f"{target} has {len(change_urls)} change_urls")
                update_task(asset_task, change_urls)


def site_portraits():
    """Entry point: refresh the site portrait of every asset task that has
    portrait-triggered monitoring enabled."""
    monitored = {
        "enableMonitor": True,
        "taskGroupType": "web_security",
        "vulSettings__enableSitePortraitTriggerMonitor": True,
        "vulSettings__enable": True
    }
    target_urls = AssetTasks.objects.filter(**monitored).distinct("targetUrl")
    logger.info(f"Site portrait start: {len(target_urls)} targets")
    for target in target_urls:
        try:
            site_portrait_task(target)
        except Exception as exc:
            # One broken target must not abort the whole sweep.
            app.logger.exception(exc)


if __name__ == "__main__":
    # Manual/standalone entry point: connect to the first configured MongoDB
    # and run a full site-portrait sweep outside of Celery.
    from mongoengine import connect
    from app.config import settings
    connect(**settings.MONGODB_SETTINGS[0])
    site_portraits()
