import os
import sys
import time
import sqlite3
import xml.etree.ElementTree as ET

from threading import Lock
from .base import Store, get_decompression_path, decompression, download


class Repo():
    def __init__(self, repo, arch, url, workdir, update) -> None:
        self.lock = Lock()
        self.repo = repo
        self.arch = arch
        self.url = url

        self.repo_info = Repo.update_repodata(workdir, url, update)

        # 创建filelist的索引 qlite创建索引必须创建在本数据库内 所以先创建
        filelists_conn = sqlite3.connect(
            self.repo_info['filelists_db'], check_same_thread=False)
        filelists_cursor = filelists_conn.cursor()
        filelists_cursor.execute(
            'create index if not exists provide_file on filelist(dirname)')
        filelists_conn.commit()
        filelists_conn.close()

        # 创建sql链接 同时创建索引
        self.sql_conn = sqlite3.connect(
            self.repo_info['primary_db'], check_same_thread=False)
        self.sql_cursor = self.sql_conn.cursor()

        self.sql_cursor.execute(
            'create unique index if not exists pkgKey on packages(pkgKey)')
        self.sql_cursor.execute(
            'create index if not exists pkg_name on packages(name, version, release)')
        self.sql_cursor.execute(
            'create index if not exists require_name on requires(name)')
        self.sql_cursor.execute(
            'create index if not exists provide_name on provides(name)')
        self.sql_conn.commit()

        # attach
        self.sql_cursor.execute(
            f"attach '{self.repo_info['filelists_db']}' as filelists")

    def __del__(self):
        with self.lock:
            self.sql_cursor.close()

        self.sql_conn.close()

    def search_packages(self, target, limit):
        ssql = f"select \
            packages.pkgKey, packages.name, packages.version, packages.release, packages.summary \
            from packages \
            where packages.name || '-' || packages.version || '-' || packages.release like '%{target}%' \
            limit {limit}"

        with self.lock:
            for (pkgKey, name, version, release, summary) in self.sql_cursor.execute(ssql).fetchall():
                yield pkgKey, self.repo, self.arch, f"{name}-{version}-{release}", summary

    def search_requires(self, target, limit):
        ssql = f"select \
            packages.pkgKey, packages.name, packages.version, packages.release, packages.summary \
            from packages, requires \
            where packages.pkgKey=requires.pkgKey and requires.name like '{target}%' \
            limit {limit}"

        with self.lock:
            for (pkgKey, name, version, release, summary) in self.sql_cursor.execute(ssql).fetchall():
                yield pkgKey, self.repo, self.arch, f"{name}-{version}-{release}", summary

    def search_provides(self, target, limit):
        ssql = f"select \
            packages.pkgKey, packages.name, packages.version, packages.release, packages.summary \
            from packages, provides \
            where packages.pkgKey=provides.pkgKey and provides.name like '{target}%' \
            limit {limit}"

        with self.lock:
            for (pkgKey, name, version, release, summary) in self.sql_cursor.execute(ssql).fetchall():
                yield pkgKey, self.repo, self.arch, f"{name}-{version}-{release}", summary

    def search_filelist(self, target: str, limit):
        if not target[0] == '/':
            return

        dirname, filename = target.rsplit('/', maxsplit=1)

        ssql = f"select \
        packages.pkgKey, packages.name, packages.version, packages.release, packages.summary, filelist.filenames \
        from packages, filelist \
        where packages.pkgKey=filelist.pkgKey and filelist.dirname = '{dirname}'"

        with self.lock:
            for (pkgKey, name, version, release, summary, filenames) in self.sql_cursor.execute(ssql).fetchall():
                if filename in filenames.split('/'):
                    yield pkgKey, self.repo, self.arch, f"{name}-{version}-{release}", summary

    def autocomplete_packages(self, target, limit):
        if target[0] == '/':
            return

        ssql = f"select name, version, release from packages where name || '-' || version || '-' || release like '{target}%' limit {limit}"

        with self.lock:
            for param in self.sql_cursor.execute(ssql).fetchall():
                yield '-'.join(param)

    def autocomplete_requires(self, target, limit):
        # requires可能重复
        ssql = f"select name from requires where name like '{target}%' group by name limit {limit} "

        with self.lock:
            for (name, ) in self.sql_cursor.execute(ssql).fetchall():
                yield name

    def autocomplete_provides(self, target, limit):
        # provides可能重复
        ssql = f"select name from provides where name like '{target}%' group by name limit {limit}"

        with self.lock:
            for (name, ) in self.sql_cursor.execute(ssql).fetchall():
                yield name

    def autocomplete_filelist(self, target, limit):
        if not target[0] == '/':
            return

        # 查询目录是否是目录下的文件
        f_dir, f_file = target.rsplit('/', maxsplit=1)
        if len(f_file) == 0:
            f_file = None

        # 这里不能带limit 因为后面还有过滤条件
        ssql = f"select filenames from filelist where dirname = '{f_dir}'"
        with self.lock:
            for (filenames, ) in self.sql_cursor.execute(ssql).fetchall():
                for filename in filenames.split('/'):
                    if f_file is None or filename.startswith(f_file):
                        yield f_dir + '/' + filename
                        limit -= 1

        # 查询目录下
        ssql = f"select dirname, filenames from filelist where dirname like '{target}%' limit {limit}"
        with self.lock:
            for (dirname, filenames) in self.sql_cursor.execute(ssql).fetchall():
                for filename in filenames.split('/'):
                    yield dirname + '/' + filename

    def get_pkginfo(self, pkgKey):
        ssql = f"select name, arch, version, release, summary, description, url,\
             time_file, time_build, rpm_license, rpm_group, rpm_buildhost, rpm_sourcerpm,\
                location_href from packages where pkgKey = {pkgKey}"
        info = dict()
        with self.lock:
            for (name, arch, version, release, summary, description, url,
                time_file, time_build, rpm_license, rpm_group, rpm_buildhost, rpm_sourcerpm,
                    location_href) in self.sql_cursor.execute(ssql).fetchall():
                info['name'] = name
                info['arch'] = arch
                info['version'] = version
                info['release'] = release
                info['summary'] = summary
                info['description'] = description
                info['url'] = url
                info['time_file'] = time.strftime(
                    "%Y-%m-%d %H:%M:%S", time.localtime(time_file))
                info['time_build'] = time.strftime(
                    "%Y-%m-%d %H:%M:%S", time.localtime(time_build))
                info['rpm_license'] = rpm_license
                info['rpm_group'] = rpm_group
                info['rpm_buildhost'] = rpm_buildhost
                info['rpm_sourcerpm'] = rpm_sourcerpm

        ssql = f"select name, flags, version, release from provides where pkgKey = {pkgKey}"
        provides = []
        with self.lock:
            for (name, flags, version, release) in self.sql_cursor.execute(ssql).fetchall():
                provides.append(
                    (name, "-" if flags is None else f"{flags} {version}-{release}"))

        ssql = f"select dirname, filenames from filelist where pkgKey = {pkgKey}"
        filelists = list()
        with self.lock:
            for dirname, filenames in self.sql_cursor.execute(ssql).fetchall():
                for filename in filenames.split('/'):
                    filelists.append(os.path.join(dirname, filename))

        ssql = f"select name, flags, version, release from requires where pkgKey = {pkgKey}"
        requires = []
        with self.lock:
            for name, flags, version, release in self.sql_cursor.execute(ssql).fetchall():
                requires.append(
                    (name, "-" if flags is None else f"{flags} {version}-{release}"))

        return {
            "info": info,
            "url": self.url + location_href,
            "requires": requires,
            "provides": provides,
            "filelists": filelists}

    @staticmethod
    def get_repomd_xml_info(repomd_xml):
        """
        解析 /repodata/repomd.xml
        """
        if not os.access(repomd_xml, os.R_OK):
            # 空
            return dict()

        tree = ET.parse(repomd_xml)
        root = tree.getroot()

        namespace, element_name = root.tag[1:].split('}')
        namespaces = {"ns": namespace}

        info = dict()
        info['revision'] = root.find('ns:revision', namespaces).text

        for item in root.findall('ns:data', namespaces):
            data = dict()

            checksum = dict()
            checksum['text'] = item.find('ns:checksum', namespaces).text
            checksum['type'] = item.find('ns:checksum', namespaces).get('type')
            data['checksum'] = checksum

            ns_open_checksum = item.find('ns:open-checksum', namespaces)
            if ns_open_checksum:
                open_checksum = dict()
                open_checksum['text'] = ns_open_checksum.text
                open_checksum['type'] = ns_open_checksum.get('type')
                data['open-checksum'] = open_checksum

            location = dict()
            location['href'] = item.find('ns:location', namespaces).get('href')
            data['location'] = location

            data['timestamp'] = item.find('ns:timestamp', namespaces).text
            data['size'] = item.find('ns:size', namespaces).text

            open_size = item.find('ns:open-size', namespaces)
            if open_size:
                data['open-size'] = open_size.text

            name = item.get('type')
            info[name] = data

        return info

    @staticmethod
    def get_repoxml_info_timestamp(repomd_xml_info, key):
        if repomd_xml_info and key in repomd_xml_info and 'timestamp' in repomd_xml_info[key]:
            return repomd_xml_info[key]['timestamp']

        return None

    @staticmethod
    def get_repodata_info(repodata_dir, url, oldinfo):
        repomd_xml_path = os.path.join(repodata_dir, 'repomd.xml')
        assert os.access(repomd_xml_path, os.F_OK)

        # 下载所有内容
        info = dict()
        info['repomd_xml_path'] = repomd_xml_path
        info['repomd_xml_info'] = Repo.get_repomd_xml_info(repomd_xml_path)

        for key, dic in info['repomd_xml_info'].items():
            if not type(dic) == dict:
                continue
            if not 'location' in dic and not 'href' in dic['location']:
                continue

            # 只有这两个信息有用
            if not key == 'filelists_db' and not key == 'primary_db':
                continue

            href = dic['location']['href']
            filename = os.path.join(repodata_dir, f'{href.split("/")[-1]}')

            if filename.endswith('.xml'):
                info[key] = filename
            else:
                info[key] = get_decompression_path(filename)

            timestamp = dic['timestamp'] if 'timestamp' in dic else None

            # 因为timestamp而更新
            update_timestamp = (timestamp is None or
                                (oldinfo and not timestamp == Repo.get_repoxml_info_timestamp(oldinfo, key)))

            # 文件已更新 或者 文件不存在 或者 时间戳是新的 则 下载
            if not os.access(filename, os.F_OK) or update_timestamp:
                print(f'download {url + href}')
                download(url + href, filename)

            if info[key] == filename:
                continue

            if not os.access(info[key], os.F_OK) or update_timestamp:
                decompression(filename)

        return info

    @staticmethod
    def update_repodata(workdir, url, update):
        """
        更新repodata目录
        """
        localdir = os.path.join(workdir, 'repodata')
        repomd_xml = os.path.join(localdir, 'repomd.xml')

        # 不更新仓库
        if not update:
            assert os.path.isdir(localdir), f"仓库 {localdir} 信息不存在"
            assert os.access(repomd_xml, os.F_OK), f"仓库 {repomd_xml} 信息不存在"

            oldinfo = Repo.get_repomd_xml_info(repomd_xml)
            return Repo.get_repodata_info(localdir, url, oldinfo)

        if not os.access(localdir, os.F_OK):
            os.makedirs(localdir)
        if not os.path.isdir(localdir):
            raise Exception(f"ERROR: {localdir} is not a directory")

        repomd_xml_url = os.path.join(url, 'repodata/repomd.xml')

        if os.access(repomd_xml, os.F_OK):
            oldinfo = Repo.get_repomd_xml_info(repomd_xml)
        else:
            oldinfo = None

        try:
            download(repomd_xml_url, repomd_xml)
        except Exception as e:
            print(f'download {repomd_xml_url} failed: {e}', file=sys.stderr)

        # 返回info信息
        return Repo.get_repodata_info(localdir, url, oldinfo)


class RPM(Store):
    """Aggregates several Repo instances and fans queries out across them."""

    def __init__(self, workdir, os_name, repos: dict, update=False):
        """
        workdir already includes os_name.
        """
        self.workdir = workdir
        self.os_name = os_name

        self.repos = list()

        # Lookup table keyed by repo name, then by arch.
        self.pkginfo_repos = dict()

        for repo_name, repoparams in repos.items():
            for arch, repo_url in repoparams.items():
                repo_workdir = os.path.join(workdir, f'{repo_name}-{arch}')
                repo = Repo(repo_name, arch, repo_url, repo_workdir, update)
                self.repos.append(repo)
                self.pkginfo_repos.setdefault(repo_name, dict())[arch] = repo

    def search(self, target, filter_arch, filter_search, limit):
        """Yield up to *limit* search hits from every matching repo."""
        for repo in self.repos:
            if limit <= 0:
                return
            if repo.arch not in filter_arch:
                continue

            # Categories are consulted in a fixed order; each one sees the
            # budget left over from the previous ones.
            # (filelist matching happens in python, not in sqlite.)
            searchers = (
                ('Pkg Name', repo.search_packages),
                ('Requires', repo.search_requires),
                ('Provides', repo.search_provides),
                ('files', repo.search_filelist),
            )
            for label, searcher in searchers:
                if limit <= 0 or label not in filter_search:
                    continue
                for result in searcher(target, limit):
                    yield result
                    limit -= 1

    def autocomplete(self, target, filter_arch, filter_search, limit):
        """Yield up to *limit* completion strings from every matching repo."""
        for repo in self.repos:
            if limit <= 0:
                return
            if repo.arch not in filter_arch:
                continue

            completers = (
                ('Pkg Name', repo.autocomplete_packages),
                ('Requires', repo.autocomplete_requires),
                ('Provides', repo.autocomplete_provides),
                ('files', repo.autocomplete_filelist),
            )
            for label, completer in completers:
                if limit <= 0 or label not in filter_search:
                    continue
                for result in completer(target, limit):
                    yield result
                    limit -= 1

    def get_pkginfo(self, pkgKey, repo, arch):
        """Return the package details dict, or None for an unknown repo/arch."""
        arch_map = self.pkginfo_repos.get(repo)
        if arch_map is None:
            return
        matched = arch_map.get(arch)
        if matched is None:
            return

        return matched.get_pkginfo(pkgKey)