#!/usr/bin/env python3

# Copyright (c) 2020-2021 Fpemud <fpemud@sina.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.


import functools
import urllib.parse
from datetime import datetime
from ._util import HelperWildcard
from ._const import CachePolicy, SiteUtil
from ._config import Config
from ._backend import Backend
from ._cache import DiskCache


@functools.cache            # make it a singleton (one cached instance per argument combination)
class Main:
    """Resolve mirror names and mirror:// URLs to concrete target URLs/hosts.

    Data comes from configured backends; non-local ("cachable") backends are
    refreshed through an on-disk cache, local ones are queried directly.
    """

    def __init__(self, system_or_user=False, cache_policy=CachePolicy.DEFAULT):
        assert isinstance(cache_policy, CachePolicy)

        self._cfg = Config()

        # True = system-wide, False = per-user; forwarded to DiskCache
        self._bSysOrUser = system_or_user
        # stored for future use; not consulted by the code below yet
        self._cachePolicy = cache_policy

        # backend name -> Backend instance, one per configured backend
        self._backendDict = {}
        for backendName, backendCfg in self._cfg.get_backend_dict().items():
            self._backendDict[backendName] = Backend.new(self._cfg, backendName, backendCfg)

        # FIXME
        # data in self._dbDict has no aging time
        # this is enough for oneshot application such as command line programs
        # daemon program can invalidated self._dbDict through self.clear() by its own policy (for example clear after every request or clear periodically)
        self._dbDict = {}

        # lazily computed (cachable, uncachable) split of self._backendDict,
        # see __splitBackendDict(); a per-instance cache is used instead of
        # functools.cache on the method, which would pin the instance in the
        # cache forever (ruff B019)
        self._splitBackendDictCache = None

        # FIXME: country code is hard-coded, the geo-ip lookup below is disabled
        self._countryCode = "CN"
        # self._countryCode = None
        # with urllib.request.urlopen("https://ipinfo.io", timeout=self._cfg.get_update_timeout()) as resp:
        #     self._countryCode = json.load(resp)["country"]

    def get_all_mirror_names(self):
        """Return the names of all known mirrors as a list."""
        return list(self._getMirrorNames())

    def has_mirror(self, mirror_name):
        """Return True if mirror_name is a known mirror."""
        return mirror_name in self._getMirrorNames()

    def get_target_urls(self, parsed_mirror_url, filter_expr, sort_expr, for_read, max_count, **kwargs):
        """Translate a parsed mirror:// URL into a list of concrete URLs.

        parsed_mirror_url: urllib.parse.ParseResult with scheme "mirror";
                           netloc is the mirror name, the rest is the path.
        filter_expr:       optional predicate over source/mirror records, or None.
        sort_expr:         accepted for interface compatibility; currently unused (FIXME).
        for_read:          when False, read-only mirrors are excluded.
        max_count:         -1 for unlimited, otherwise at least 2.

        Raises ValueError if the mirror name is unknown.
        """
        assert parsed_mirror_url.scheme == "mirror"
        assert max_count == -1 or max_count >= 2

        dbDict = self._getData([parsed_mirror_url.netloc])
        if parsed_mirror_url.netloc not in dbDict:
            raise ValueError("mirror \"%s\" not found" % (parsed_mirror_url.netloc))

        if filter_expr is None:
            def filter_expr(x):
                return True

        # mirror://github/foo/bar -> /foo/bar (path + params + query + fragment)
        rest = parsed_mirror_url.path
        if parsed_mirror_url.params != "":
            rest += ";%s" % (parsed_mirror_url.params)
        if parsed_mirror_url.query != "":
            rest += "?%s" % (parsed_mirror_url.query)
        if parsed_mirror_url.fragment != "":
            rest += "#%s" % (parsed_mirror_url.fragment)

        db = dbDict[parsed_mirror_url.netloc]
        targetSourceList = [x for x in db["sources"] if filter_expr(x)]
        targetMirrorList = self.__selectMirrors(db["mirrors"], False, filter_expr, for_read, rest)
        targetFallbackMirrorList = self.__selectMirrors(db["mirrors"], True, filter_expr, for_read, rest)

        objList = self.__combine(targetMirrorList, targetSourceList, targetFallbackMirrorList, max_count)

        ret = []
        for obj in objList:
            if "url" not in obj:
                # record has no URL form (host-only entry), skip it
                continue
            x = obj["url"].rstrip("/")
            if "chroot" not in obj:
                x += rest
            else:
                # mirror hosts only a sub-tree: strip its chroot prefix from the path
                x += rest[len(obj["chroot"]):]
            ret.append(x)

        return ret

    def get_target_hosts(self, mirror_name, filter_expr, sort_expr, for_read, max_count, **kwargs):
        """Return the host names serving mirror_name, best candidates first.

        Same parameter semantics as get_target_urls() except that the mirror is
        addressed by plain name and no path-based ("partial-mirror") filtering
        is applied. sort_expr is currently unused (FIXME).

        Raises ValueError if the mirror name is unknown.
        """
        assert max_count == -1 or max_count >= 2

        dbDict = self._getData([mirror_name])
        if mirror_name not in dbDict:
            raise ValueError("mirror \"%s\" not found" % (mirror_name))

        if filter_expr is None:
            def filter_expr(x):
                return True

        db = dbDict[mirror_name]
        targetSourceList = [x for x in db["sources"] if filter_expr(x)]
        targetMirrorList = self.__selectMirrors(db["mirrors"], False, filter_expr, for_read)
        targetFallbackMirrorList = self.__selectMirrors(db["mirrors"], True, filter_expr, for_read)

        ret = self.__combine(targetMirrorList, targetSourceList, targetFallbackMirrorList, max_count)
        return [x["host"] for x in ret if "host" in x]

    def clear(self, mirror_name):
        """Drop in-memory data, for one mirror or (mirror_name is None) for all."""
        if mirror_name is None:
            self._dbDict.clear()
        elif mirror_name in self._dbDict:
            del self._dbDict[mirror_name]

    def _getMirrorNames(self):
        """Return the set of all mirror names, refreshing the disk cache as needed."""
        dtNow = datetime.now()
        dc = DiskCache(self._cfg, self._bSysOrUser, False)
        cachableBackendDict, uncachableBackendDict = self.__splitBackendDict()

        # update cache for all cachable backends whose data is stale
        if len(cachableBackendDict) > 0:
            backendDataDict = {}
            backendAgingTimeDict = {}
            for backendName, mirrorNames in dc.getNeedUpdateBackendNameMirrorNamesDict(dtNow, cachableBackendDict, None).items():
                backendDataDict[backendName], backendAgingTimeDict[backendName] = self._backendDict[backendName].get_data_and_aging_time(mirrorNames)
            dc.update(dtNow, backendDataDict, backendAgingTimeDict, None)

        # mirror names of all cachable backends come from the cache
        ret = set(dc.getMirrorNames(dtNow, cachableBackendDict))

        # merge in the names from the uncachable (local) backends
        # (get_mirror_names() is assumed to return a set — TODO confirm)
        for backendObj in uncachableBackendDict.values():
            ret |= backendObj.get_mirror_names()

        return ret

    def _getData(self, mirrorNameList):
        """Return {mirror-name: {"sources": [...], "mirrors": [...]}} for the given names.

        Cachable backends are served from the (freshly updated) disk cache;
        local backends are queried directly and merged in, de-duplicating
        records with SiteUtil.find_object_in_list().
        """
        dtNow = datetime.now()
        dc = DiskCache(self._cfg, self._bSysOrUser, False)
        cachableBackendDict, uncachableBackendDict = self.__splitBackendDict()

        # update cache for all cachable backends whose data is stale
        if len(cachableBackendDict) > 0:
            backendDataDict = {}
            backendAgingTimeDict = {}
            for backendName, mirrorNames in dc.getNeedUpdateBackendNameMirrorNamesDict(dtNow, cachableBackendDict, mirrorNameList).items():
                backendDataDict[backendName], backendAgingTimeDict[backendName] = self._backendDict[backendName].get_data_and_aging_time(mirrorNames)
            dc.update(dtNow, backendDataDict, backendAgingTimeDict, mirrorNameList)

        # data of all cachable backends comes from the cache
        ret = dc.getData(dtNow, cachableBackendDict, mirrorNameList)

        # merge in the data of the uncachable (local) backends
        for backend in uncachableBackendDict.values():
            data, _ = backend.get_data_and_aging_time(mirrorNameList)
            for name, v in data.items():
                ret.setdefault(name, {
                    "sources": [],
                    "mirrors": [],
                })
                for s in v["sources"]:
                    if not SiteUtil.find_object_in_list(s, ret[name]["sources"]):
                        ret[name]["sources"].append(s)
                for m in v["mirrors"]:
                    if not SiteUtil.find_object_in_list(m, ret[name]["mirrors"]):
                        ret[name]["mirrors"].append(m)
        return ret

    def __selectMirrors(self, mirrorList, fallback, filter_expr, for_read, rest=None):
        """Select usable mirror records from mirrorList.

        fallback selects between regular (False) and fallback (True) mirrors.
        Records passing the "local" filter (local-mirror role / matching
        country) are preferred; only if none match is the plain filter used.
        When rest is not None, partial mirrors must also cover that path.
        """
        preList = [x for x in mirrorList if bool(x["fallback-mirror"]) == fallback]

        for exprFactory in (self._getFilterExprLocal, self._getFilterExpr):
            expr = exprFactory(filter_expr, for_read)
            ret = [x for x in preList if expr(x)]
            if rest is not None:
                ret = [x for x in ret if x["partial-mirror"] is None or HelperWildcard.match_patterns(rest, x["partial-mirror"])]
            if len(ret) > 0:
                return ret
        return []

    @staticmethod
    def __combine(mirrorList, sourceList, fallbackMirrorList, max_count):
        """Concatenate mirrors, sources and fallback mirrors, honoring max_count.

        When max_count is limited and any source exists, one slot is reserved
        for a source; fallback mirrors only fill whatever room is left.
        """
        if max_count == -1:
            return mirrorList + sourceList + fallbackMirrorList

        ret = []
        if len(sourceList) > 0:
            ret += mirrorList[:max_count - 1]
            ret += sourceList[:max_count - len(ret)]
        else:
            ret += mirrorList[:max_count]
        ret += fallbackMirrorList[:max_count - len(ret)]
        return ret

    def _getFilterExprLocal(self, filter_expr, for_read):
        """Wrap filter_expr so that only "local" records pass.

        A record is local when its role is "local-mirror" or its country code
        matches ours; it must also support IP address families, be writable
        when for_read is False, and satisfy the caller's filter_expr.
        """
        def _expr(x):
            if x["role"] != "local-mirror" and ("country-code" not in x or x["country-code"] != self._countryCode):
                return False
            if "ip" not in x["address-families"]:
                return False
            if not for_read and x["readonly-mirror"]:
                return False
            return bool(filter_expr(x))
        return _expr

    def _getFilterExpr(self, filter_expr, for_read):
        """Wrap filter_expr with IP-address-family and read/write checks."""
        def _expr(x):
            if "ip" not in x["address-families"]:
                return False
            if not for_read and x["readonly-mirror"]:
                return False
            return bool(filter_expr(x))
        return _expr

    def __splitBackendDict(self):
        """Split self._backendDict into (cachable, uncachable) dicts.

        Non-local backends are cachable; local ones are not. Computed once
        per instance and memoized in self._splitBackendDictCache.
        """
        if self._splitBackendDictCache is None:
            cachable, uncachable = {}, {}
            for backendName, backendObj in self._backendDict.items():
                if backendObj.is_local():
                    uncachable[backendName] = backendObj
                else:
                    cachable[backendName] = backendObj
            self._splitBackendDictCache = (cachable, uncachable)
        return self._splitBackendDictCache


def init(system_or_user=False, cache_policy=CachePolicy.DEFAULT):
    """Explicitly create the module-level singleton with custom settings.

    Must be called before the first target_urls()/target_hosts() call, which
    would otherwise create a default-configured instance implicitly.

    Raises:
        RuntimeError: if the singleton already exists (init() called twice, or
                      after an implicit creation). An explicit exception is
                      used instead of assert, which is stripped under -O.
    """
    global _main
    if _main is not None:
        raise RuntimeError("already initialized")
    _main = Main(system_or_user, cache_policy)


def target_urls(url, protocols=None, filter_key=None, sort_key=None, sys_or_user=False, for_read=False, max_count=-1, **kwargs):
    """Resolve a mirror:// URL to a list of concrete target URLs.

    url:         a "mirror://<name>/<path>" URL string.
    protocols:   optional non-empty collection; restrict results to records
                 whose "protocol" is in it.
    sys_or_user: forwarded to Main() when the singleton is created implicitly.
    sort_key:    passed through; currently unused downstream (FIXME).

    Raises ValueError for non-mirror:// URLs or unknown mirror names.
    """
    global _main

    # validate the URL before touching (or creating) the singleton
    mirror_url_obj = urllib.parse.urlparse(url)
    if mirror_url_obj.scheme != "mirror":
        raise ValueError("only accept mirror://")

    if _main is None:
        # honor sys_or_user on implicit creation (it was previously ignored)
        _main = Main(sys_or_user)

    if protocols is not None:
        assert len(protocols) > 0
        if filter_key is None:
            def filter_key(x):
                return True
        # AND the caller's filter with a protocol-membership check
        filter_key = functools.partial(lambda p, f, x: x["protocol"] in p and f(x), protocols, filter_key)

    return _main.get_target_urls(mirror_url_obj, filter_key, sort_key, for_read, max_count, **kwargs)


def target_hosts(host, filter_key=None, sort_key=None, sys_or_user=False, for_read=False, max_count=-1, **kwargs):
    """Resolve a mirror name to a list of serving host names.

    host:        the mirror name.
    sys_or_user: forwarded to Main() when the singleton is created implicitly.
    sort_key:    passed through; currently unused downstream (FIXME).

    Raises ValueError for unknown mirror names.
    """
    global _main
    if _main is None:
        # honor sys_or_user on implicit creation (it was previously ignored)
        _main = Main(sys_or_user)

    return _main.get_target_hosts(host, filter_key, sort_key, for_read, max_count, **kwargs)


_main = None
