"""parse log used re"""
import collections
import concurrent.futures
import io
import os
import re
import typing

from conf import FileConf
from utils import DataSet


class AbstractParser:
    """Base class that caches the configuration values shared by parsers."""

    def __init__(self, conf):
        """
        Remember the configuration and pull out the frequently used fields.

        :param conf: configuration object exposing ``use_mul_core``,
            ``file_interval``, ``log_file`` and ``log_dir`` attributes
        """
        self._conf = conf
        self._log_dir = conf.log_dir
        self._log_file = conf.log_file
        self._interval = conf.file_interval
        self._use_mul_core = conf.use_mul_core

    def parse(self):
        """Run the parse; concrete subclasses must override this."""
        raise NotImplementedError


class Parser(AbstractParser):
    """usually one parser match one log file(FileConf), one pattern match more analyser"""

    class Pattern:
        # User-friendly type aliases translated into regex fragments
        # by _replace (looked up via getattr, so the names are the API).
        any = r".*"
        string = r"\w+"
        int = r"\d+"
        float = r"\d*\.?\d+"

    # Matches one "<key:value>" placeholder inside a user-supplied pattern.
    _pat = r"<(?P<key>.*?):(?P<value>.*?)>"

    def __init__(self, conf: FileConf):
        """
        Main parser. Register at least one pattern-analyser pair (via
        add_analyser / add_analyser2) before calling parse().

        :param conf: file configuration (paths, encodings, parallelism flags)
        """
        super().__init__(conf)
        # When running on a single cpu core, _log holds the opened file object.
        self._log = None
        self._pool = None
        # compiled pattern -> list of analysers, chained in registration order
        self._analysers = collections.defaultdict(list)

        self._prepare_process(conf)

    def _prepare_process(self, conf):
        """
        Do the one-off preparation: create the process pool (multi-core)
        or open the log file (single-core).

        :param conf: same configuration object passed to __init__
        :return:
        """
        if conf.is_file:
            # single log file
            if self._use_mul_core:
                self._pool = concurrent.futures.ProcessPoolExecutor(os.cpu_count())
            else:
                self._log = self._conf.log_file.open(encoding=self._conf.log_file_encoding)
        else:
            # TODO(review): directory mode is not implemented yet.
            pass

    def add_analyser(self, pattern: str):
        """
        Decorator that registers an analyse function for *pattern*.
        If a pattern has several analysers, each one receives the value
        returned by the previous one (a simple processing chain).

        :param pattern: user-friendly pattern, e.g. "<name:string> <age:int>"
        :return: the decorator
        """

        pat = self._parse_pattern(pattern)

        def _wrapper(analyser: typing.Callable):
            self._analysers[pat].append(analyser)
            return analyser
        return _wrapper

    def _parse_pattern(self, pattern: str):
        """
        Translate a user-friendly pattern into a regular expression and
        compile it.

        :param pattern: pattern containing "<key:type>" placeholders
        :return: compiled re.Pattern
        """
        return re.compile(re.sub(self._pat, self._replace, pattern))

    def _replace(self, match: re.Match):
        """
        Translate one placeholder, e.g. r"<name:any>" -> r"(?P<name>.*)".
        Unknown type names fall back to Pattern.any; a key of "_" produces
        an anonymous (non-capturing) fragment.

        :param match: match of _pat against one placeholder
        :return: regex fragment
        """
        d = match.groupdict()
        key = d.get("key")
        value = getattr(self.Pattern, d.get("value"), self.Pattern.any)

        # here can be optimization, because user need write `<_:throw_away>` ,This is too much trouble
        # TODO will change to <throw_away> latter
        # if user can be write re pattern, he/she can write directly
        if key == "_":
            return value
        return r"(?P<{}>{})".format(key, value)

    def add_analyser2(self, pattern: str, analyser: typing.Callable):
        """
        Plain-function variant of add_analyser (no decorator syntax).

        :param pattern: user-friendly pattern
        :param analyser: callable taking (data, line)
        :return: the analyser, unchanged
        """
        self.add_analyser(pattern)(analyser)
        return analyser

    def parse(self):
        """
        Iterate the log file, feeding every line to the registered
        analysers. At least one pattern-analyser pair must have been
        registered before calling this method.

        :return:
        """
        if self._use_mul_core:
            files = []
            pos = []
            for i in range(os.cpu_count()):
                # BUGFIX: was conf_file_encoding; the *log* file must be
                # opened with log_file_encoding (see _prepare_process).
                f: io.TextIOWrapper = self._log_file.open(encoding=self._conf.log_file_encoding)
                start_pos = self._patch_file_point(f, i)
                files.append(f)
                pos.append(start_pos)
            pos.append(None)  # last worker reads until EOF

            try:
                # Worker i stops where worker i+1 starts, hence pos[1:].
                futures = [
                    self._pool.submit(self._iter, file, po)
                    for file, po in zip(files, pos[1:])
                ]
                # BUGFIX: wait for the workers instead of returning
                # immediately with the work still in flight.
                concurrent.futures.wait(futures)
            finally:
                # BUGFIX: the per-worker handles were never closed.
                for f in files:
                    f.close()
        else:
            with self._log as file:
                self._iter(file)

    def _patch_file_point(self, file, i):
        """
        this function is core of successfully process files in parallel
        first according i and interval to seek file point, now, general
        not locate a certain position (line head), so if we need change
        file point, but if file point on the number 0, we passed, otherwise,
        we need read a new line, then, our file point will be in a line head
        and return the file point position, the outer func will record this
        position and use it as the process stop flag.

        :param file: freshly opened text file
        :param i: worker index (0-based)
        :return: byte offset of this worker's first whole line
        """
        file.seek(self._interval * i)
        if file.tell():
            # Mid-line seek: skip the partial line so we start at a line head.
            file.readline()
        return file.tell()

    def _iter(self, file, stop_pos=None):
        """
        Iterate an opened file, feeding each line to _iter_once.

        :param file: opened text file, already positioned at its start point
        :param stop_pos: byte offset at which to stop; None means read to EOF
        :return:
        """
        if stop_pos is None:
            for line in file:
                self._iter_once(line)
        else:
            # BUGFIX: use readline() instead of iteration -- on text files
            # tell() raises OSError while iterating -- and process the line
            # *before* checking the boundary, because the line that ends
            # exactly at stop_pos belongs to this worker (the next worker
            # starts right after it).
            while True:
                line = file.readline()
                if not line:
                    break
                self._iter_once(line)
                if file.tell() >= stop_pos:
                    break

    def _iter_once(self, line):
        """
        Match one log line against every registered pattern and run the
        pattern's analyser chain; each analyser's return value is passed
        on to the next analyser.

        :param line: one line of the log file
        :return:
        """
        # TODO there are so much things to do
        for pattern, analysers in self._analysers.items():
            match: re.Match = pattern.match(line)
            data = DataSet(match.groupdict() if match else ())
            for analyser in analysers:
                data = analyser(data, line)
