#!/usr/bin/python3
"""
Description: Parser manager.
Class: Manager
"""
import os
import stat
import threading
import json
from collections import defaultdict
from pygrok import Grok

from ..libs.log import LOGGER
from ..libs.readconfig import read_yaml_config_file
from .client import ParserClient
from ..parsers.mixinfo import MixInfoParser
from ..parsers.log import LogParser
from ..parsers.config import ConfigParser
from ..parsers.table import TableParser
from ..plugins.list import ListParser

__here__ = os.path.realpath(os.path.dirname(os.path.dirname(__file__)))


class Manager:
    """
    class Manager is the controller which can get data from datasource,
    and parse the data using parsers.
    """
    # Lock guarding first-time creation of the singleton instance.
    _instance_lock = threading.Lock()
    # Set to True once __init__ has completed, so repeated construction is a no-op.
    init_flag = False

    def __new__(cls, *args, **kwargs):
        """
        Singleton: create the unique instance with double-checked locking.
        """
        LOGGER.debug("args: %s", args)
        LOGGER.debug("kwargs: %s", kwargs)

        if not hasattr(Manager, "_instance"):
            with Manager._instance_lock:
                # re-check under the lock so only one thread allocates
                if not hasattr(Manager, "_instance"):
                    Manager._instance = object.__new__(cls)

        return Manager._instance

    def __init__(self, regex_config_file=None, regex_custom_dir=None, datasource=None):
        """
        Class instance initialization.

        Args:
            regex_config_file (str): the path to regex configuration file.
            regex_custom_dir (str): directory holding custom grok pattern
                files; overrides the config's 'custom_file' entry when given.
            datasource (object): an object that keeps the log configuration and log content.
        """
        # Singleton guard: only the first construction runs the body below.
        if Manager.init_flag:
            return
        self.parsers = {}
        self.regex_config = defaultdict(dict)
        self.param_config = None
        self.datasource = datasource
        # read parameter configuration and map path to symbolic name.
        self.get_param_config()
        self.get_regex_config(regex_config_file, regex_custom_dir)
        self.load_parsers()
        self.check_result = {}
        Manager.init_flag = True

    def parse(self, **kwargs):
        """
        Get data from the datasource and parse it.

        Args:
            **kwargs: optional flags:
                print (bool): print the parsed result to stdout.
                write (bool): write the parsed result to files.
                file_dir (str): output directory, used together with 'write'.

        Returns:
            list: the parsed spec objects produced by the parser client.
        """
        # load the parser client
        client = ParserClient(self.datasource, self.param_config,
                              self.regex_config, self.parsers)
        client.get_data()
        client.get_parsed_content()

        if kwargs.get('print'):
            self.print_res(client.result)

        if kwargs.get('write'):
            self.write_res(client.result, kwargs.get('file_dir'))

        return client.result

    @staticmethod
    def write_res(res, file_dir):
        """
        Write the result to the specified directory, one JSON file
        per content type.

        Args:
            res (list): spec list.
            file_dir (str): output directory for the JSON files.
        """
        output = {}
        # Counts occurrences of each symbolic name so duplicate names are
        # disambiguated as "<name>_1", "<name>_2", ...
        name_cache = defaultdict(int)

        for spec in res:
            name = spec.symbolic_name
            content_type = spec.content_type
            if content_type is None:
                content_type = 'other'
            if name in name_cache:
                name = f"{name}_{name_cache[name]}"
            name_cache[spec.symbolic_name] += 1
            if content_type not in output:
                output[content_type] = defaultdict(dict)
            entry = output[content_type][name]
            entry['time'] = spec.time.strftime("%Y-%m-%d-%H:%M:%S")
            entry['hostname'] = spec.hostname
            entry['path'] = spec.file_path
            entry['parse'] = spec.parse_flag
            entry['result'] = spec.res

        for key, value in output.items():
            json_str = json.dumps(value)
            write_path = os.path.join(file_dir, key) + '.json'
            if os.path.exists(write_path):
                os.remove(write_path)
            # O_EXCL guarantees a freshly created file; modes restrict it to
            # owner read/write.
            flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL
            modes = stat.S_IWUSR | stat.S_IRUSR
            with os.fdopen(os.open(write_path, flags, modes), 'w') as json_file:
                json_file.write(json_str)
            LOGGER.info("write to %s.", write_path)

    @staticmethod
    def print_res(res):
        """
        Print the result to stdout, one banner-delimited section per spec,
        followed by summary statistics.

        Args:
            res (list): spec list.
        """
        count = 0
        for spec in res:
            print(spec.symbolic_name.center(120, '='))
            print("success = ", spec.parse_flag)
            if spec.parse_flag:
                count += 1
            print("time = ", spec.time)
            print("hostname = ", spec.hostname)
            print("file_path = ", spec.file_path)
            result = spec.res
            if isinstance(result, list):
                for line in result:
                    print(line)
            else:
                print(result)
        print("Statistics".center(120, '='))
        print("Number of contents: ", len(res))
        print("Number of contents that parsed succeed: ", count)

    def get_param_config(self):
        """
        Load the parameter configuration from the datasource.

        Does nothing when no datasource was supplied or when the
        configuration has already been loaded.
        """
        if self.datasource is None:
            LOGGER.error("No datasource input.")
            return

        if self.param_config is None:
            self.param_config = self.datasource.param_config
            if self.param_config:
                LOGGER.info("Parameter configuration file loaded.")

    def get_regex_config(self, regex_config_file, regex_custom_dir):
        """
        Read the regex configuration file and compile the valid grok patterns.

        Args:
            regex_config_file (str): path to the YAML regex configuration file.
            regex_custom_dir (str): directory of custom pattern files; when
                None, the config's 'custom_file' entry is used instead.
        """
        config = read_yaml_config_file(regex_config_file)

        if config is None:
            return

        custom_patterns_dir = regex_custom_dir or config.get('custom_file')
        self.regex_config['custom_file'] = custom_patterns_dir
        grok = {}
        patterns = config.get('patterns')
        if patterns is None:
            LOGGER.warning("No pattern in regex config file.")
        else:
            for log_type, raw_patterns in patterns.items():
                grok[log_type] = []
                self.regex_config['patterns'][log_type] = []
                for pattern in raw_patterns:
                    try:
                        compiled = Grok(pattern, custom_patterns_dir)
                    except KeyError:
                        # The grok expression references an unknown pattern
                        # name; skip it but keep compiling the rest.
                        LOGGER.warning(
                            "Pattern '%s' in '%s' compiled failed.", pattern, log_type)
                        continue
                    grok[log_type].append(compiled)
                    self.regex_config['patterns'][log_type].append(pattern)

        self.regex_config['grok'] = grok if grok else None
        LOGGER.info("Regex pattern compiled.")

    def load_parsers(self):
        """
        Load builtin parsers and custom parsers into self.parsers,
        keyed by content type.
        """
        # load builtin parsers
        self.parsers['mixinfo'] = MixInfoParser
        self.parsers['table'] = TableParser
        self.parsers['log'] = LogParser
        self.parsers['config'] = ConfigParser
        LOGGER.info("All builtin parsers loaded.")
        # load custom parsers
        self.parsers['list'] = ListParser
        LOGGER.info("All custom parsers loaded.")

        # Defensive check: should never trigger with the assignments above.
        if not self.parsers:
            LOGGER.warning("There is no valid parser.")