import logging
import json
import requests
from logstash_async.handler import AsynchronousLogstashHandler, SynchronousLogstashHandler
from elasticsearch import Elasticsearch
import colorlog
import time


class Log:
    """Console logger factory.

    Fluent usage: ``Log().init(platform="name").ConsoleLogger()``.
    ``platform`` doubles as the ``logging`` logger name.
    """

    def __init__(self):
        # Flipped to True once init() has been called.
        self.__check_init__ = False

    def init(self, platform):
        """Bind the logger name and mark the instance as initialized.

        :param platform: logger name (subclasses also use it as the ES index name).
        :return: self, so calls can be chained.
        """
        self.platform = platform
        self.__check_init__ = True
        return self

    def __stream_handler__(self):
        """Build a colorized stderr handler with one color per level."""
        formatter = colorlog.ColoredFormatter(
            fmt='%(log_color)s[%(asctime)s.%(msecs)03d] %(filename)s -> %(funcName)s line:%(lineno)d [%(levelname)s] : %(message)s',
            datefmt='%Y-%m-%d  %H:%M:%S',
            log_colors={
                'DEBUG': 'white',  # cyan white
                'INFO': 'green',
                'WARNING': 'yellow',
                'ERROR': 'red',
                'CRITICAL': 'bold_red',
            }
        )
        _h = logging.StreamHandler()
        _h.setFormatter(formatter)
        return _h

    def ConsoleLogger(self, level=logging.INFO):
        """Return the console logger named ``self.platform``.

        :param level: logging level applied to the logger.
        :return: a configured ``logging.Logger``.
        """
        _logger = logging.getLogger(self.platform)
        _logger.setLevel(level)
        # Bug fix: logging.getLogger() returns a shared instance, so the
        # original code stacked one more StreamHandler per call, duplicating
        # every log line. Only attach a console handler if none is present.
        if not any(isinstance(h, logging.StreamHandler) for h in _logger.handlers):
            _logger.addHandler(self.__stream_handler__())
        return _logger


class LogELK(Log):
    """Logger factory backed by an ELK stack.

    ``init()`` connects to Elasticsearch, creates the ``platform`` index
    (with an ``@timestamp`` date mapping) and a matching Kibana index
    pattern if missing, then hands out loggers that forward records to
    Logstash over HTTP/TCP/UDP, synchronously or asynchronously.
    """

    # Dotted paths of the logstash_async transport classes.
    HttpTransport = "logstash_async.transport.HttpTransport"
    UdpTransport = "logstash_async.transport.UdpTransport"
    TcpTransport = "logstash_async.transport.TcpTransport"

    def __init__(self,
                 elasticsearch_conf: dict = None,
                 logstash_conf: dict = None,
                 kibana_conf: dict = None):
        """

        :param elasticsearch_conf: {"hosts": ["http://127.0.0.1:9200/"]}
        :param logstash_conf: {"host": "127.0.0.1", "port":8000}
        :param kibana_conf: {"host":"127.0.0.1", "port":5601}
        """
        super(LogELK, self).__init__()
        # Bug fix: the previous mutable default arguments (`= {}`) were
        # shared across ALL instances and mutated in place by the
        # Sync*/Async*Logger helpers below, leaking transport settings
        # between unrelated LogELK instances. Defaulting to None and
        # creating fresh dicts keeps each instance isolated.
        self.elasticsearch_conf = {} if elasticsearch_conf is None else elasticsearch_conf
        self.logstash_conf = {} if logstash_conf is None else logstash_conf
        self.kibana_conf = {} if kibana_conf is None else kibana_conf
        self.__check_init__ = False

    def init(self,
             platform,
             number_of_shards=1,
             number_of_replicas=0,
             ):
        """Connect to ES, ensure index/pattern exist, and mark the instance ready.

        :param platform: logger name, ES index name and Kibana pattern title.
        :param number_of_shards: shard count for the created ES index.
        :param number_of_replicas: replica count for the created ES index.
        :return: self, so calls can be chained.
        """
        self.platform = platform
        self.number_of_shards = number_of_shards
        self.number_of_replicas = number_of_replicas
        self.es = Elasticsearch(**self.elasticsearch_conf)
        self.kibana_url = f'http://{self.kibana_conf.get("host")}:{self.kibana_conf.get("port")}'
        self.__create_mapping__()
        self.__check_init__ = True
        return self

    def __create_mapping__(self):
        """Create the ES index and Kibana index pattern if they do not exist.

        Best-effort: connection failures are logged (not raised) so loggers
        remain usable without a reachable ELK stack.
        """
        mapping = {
            "settings": {
                "number_of_shards": self.number_of_shards,
                "number_of_replicas": self.number_of_replicas
            },
            "mappings": {
                "properties": {
                    "@timestamp": {
                        "type": "date",
                        # "format": "yyyy-MM-dd HH:mm:ss"
                    }
                }
            }
        }
        try:
            # Keyword argument keeps this working on elasticsearch-py >= 8,
            # where positional arguments to index APIs are rejected.
            _exists = self.es.indices.exists(index=self.platform)
        except Exception as e:
            logging.exception(f"es连接失败...\n{str(e)}")
            return
        if _exists:
            return
        try:
            self.es.indices.create(index=self.platform, body=mapping)
        except Exception as e:
            logging.exception(f"es连接失败...\n{str(e)}")
            return
        try:
            res = self.__create_index_pattern__()
            if res.status_code != 200:
                raise Exception("kibana索引失败")
        except Exception as e:
            # Roll back the freshly created index so a later init() retries cleanly.
            self.es.indices.delete(index=self.platform)
            logging.exception(f"kibana连接失败...\n{str(e)}")

    def __create_index_pattern__(self):
        """POST a Kibana index pattern keyed on @timestamp; return the HTTP response."""
        headers = {'Content-Type': 'application/json', 'kbn-xsrf': 'true'}
        url = self.kibana_url + "/api/saved_objects/index-pattern"
        data = {"attributes": {"title": self.platform, "timeFieldName": "@timestamp"}}
        return requests.post(url, headers=headers, json=data)

    def __check__(self):
        """Raise unless a valid transport is configured and init() was called."""
        _transports = [self.HttpTransport, self.TcpTransport, self.UdpTransport]
        # .get() returns None when unset, and None is never in the list,
        # so one membership test covers both "missing" and "invalid".
        if self.logstash_conf.get("transport") not in _transports:
            raise Exception(f"请指定logstash.transport {json.dumps(_transports)}")
        if not self.__check_init__:
            raise Exception("请先执行init()")

    def __set_transport__(self, transport):
        # Shared setup for the six convenience constructors below.
        # database_path keeps the historical "<dotted-class-path>.db" value
        # for backward compatibility with existing spool files on disk.
        self.logstash_conf["transport"] = transport
        self.logstash_conf["database_path"] = transport + ".db"

    def __build_logger__(self, handler_cls, level, console):
        # Common body of SyncLogger/AsyncLogger: validate config, then
        # attach a Logstash handler (and optionally a console handler).
        self.__check__()
        _logger = logging.getLogger(self.platform)
        _logger.setLevel(level)
        _logger.addHandler(handler_cls(**self.logstash_conf))
        if console:
            _logger.addHandler(self.__stream_handler__())
        return _logger

    def SyncLogger(self, level=logging.INFO, console: bool = True):
        """Logger with a synchronous (blocking) Logstash handler."""
        return self.__build_logger__(SynchronousLogstashHandler, level, console)

    def AsyncLogger(self, level=logging.INFO, console: bool = True):
        """Logger with an asynchronous (background-thread) Logstash handler."""
        return self.__build_logger__(AsynchronousLogstashHandler, level, console)

    def SyncHttpLogger(self, level=logging.INFO, console: bool = True):
        """Synchronous logger shipping over HTTP."""
        self.__set_transport__(self.HttpTransport)
        return self.SyncLogger(level=level, console=console)

    def SyncTcpLogger(self, level=logging.INFO, console: bool = True):
        """Synchronous logger shipping over TCP."""
        self.__set_transport__(self.TcpTransport)
        return self.SyncLogger(level=level, console=console)

    def SyncUdpLogger(self, level=logging.INFO, console: bool = True):
        """Synchronous logger shipping over UDP."""
        self.__set_transport__(self.UdpTransport)
        return self.SyncLogger(level=level, console=console)

    def AsyncHttpLogger(self, level=logging.INFO, console: bool = True):
        """Asynchronous logger shipping over HTTP."""
        self.__set_transport__(self.HttpTransport)
        return self.AsyncLogger(level=level, console=console)

    def AsyncTcpLogger(self, level=logging.INFO, console: bool = True):
        """Asynchronous logger shipping over TCP."""
        self.__set_transport__(self.TcpTransport)
        return self.AsyncLogger(level=level, console=console)

    def AsyncUdpLogger(self, level=logging.INFO, console: bool = True):
        """Asynchronous logger shipping over UDP."""
        self.__set_transport__(self.UdpTransport)
        return self.AsyncLogger(level=level, console=console)


def test():
    """Demo: build a console logger and emit one record of each kind.

    The LogELK variants below are left commented out because they need a
    reachable ELK stack; uncomment exactly one assignment to try them.
    """
    # --- endpoint configuration -------------------------------------
    elasticsearch_conf = {"hosts": ["http://172.31.3.188:9200/"]}
    http_logstash_conf = {"host": "172.31.3.188", "port": 8001}
    udp_logstash_conf = {"host": "172.31.3.188", "port": 8002}
    tcp_logstash_conf = {"host": "172.31.3.188", "port": 8003}
    kibana_conf = {"host": "172.31.3.188", "port": 5601}

    # Active demo: console-only logger.
    logger = Log().init(platform="test").ConsoleLogger()

    # Async ELK-backed alternatives:
    # logger = LogELK(
    #     elasticsearch_conf=elasticsearch_conf,
    #     logstash_conf=http_logstash_conf,
    #     kibana_conf=kibana_conf).init(platform="test").AsyncHttpLogger()
    # logger = LogELK(
    #     elasticsearch_conf=elasticsearch_conf,
    #     logstash_conf=tcp_logstash_conf,
    #     kibana_conf=kibana_conf).init(platform="test").AsyncTcpLogger()
    # logger = LogELK(
    #     elasticsearch_conf=elasticsearch_conf,
    #     logstash_conf=udp_logstash_conf,
    #     kibana_conf=kibana_conf).init(platform="test").AsyncUdpLogger()
    # Sync ELK-backed alternatives:
    # logger = LogELK(
    #     elasticsearch_conf=elasticsearch_conf,
    #     logstash_conf=http_logstash_conf,
    #     kibana_conf=kibana_conf).init(platform="test").SyncHttpLogger()
    # logger = LogELK(
    #     elasticsearch_conf=elasticsearch_conf,
    #     logstash_conf=tcp_logstash_conf,
    #     kibana_conf=kibana_conf).init(platform="test").SyncTcpLogger()
    # logger = LogELK(
    #     elasticsearch_conf=elasticsearch_conf,
    #     logstash_conf=udp_logstash_conf,
    #     kibana_conf=kibana_conf).init(platform="test").SyncUdpLogger()

    # One record per level, plus an exception with traceback.
    logger.info("info 测试")
    logger.debug("debug 日志")
    logger.warning("warning 日志")
    try:
        1 / 0
    except Exception as e:
        logger.exception(str(e))
    # Three records carrying an extra "uuid" field.
    for _ in range(3):
        logger.info("测试加入uuid", extra={"uuid": "abcdefgsadasda"})


# Run the demo only when executed as a script, not on import.
if __name__ == "__main__":
    test()
