"""
@author: xiangping
@contact: xiangpingbu@gmail.com
@time: 2020/6/22 12:18 下午
@file: kafka_service
@Desc:
"""

import json
import logging
import signal
import sys
import threading
from threading import Event

from kafka import KafkaConsumer, TopicPartition, OffsetAndMetadata

from app.exts import ironman_redis as redis
from app.models.po.base_column import shaojie_column, gaolu_column,lugang_column, hanfan_column,lugang_qinshi_column,\
    lugang_activiness_long_column,lugang_activiness_short_column
from app.run import app
from app.services.business import gaolu_biz_service
from app.services.dal import gaolu_dao_service, hanfan_dao_service


class FlaskKafka:
    """Kafka consumer that dispatches messages to per-topic handler functions.

    Handlers are registered with the ``handle(topic)`` decorator; ``run()``
    starts a background thread that subscribes to every registered topic and
    invokes each topic's handlers for every message received.
    """

    def __init__(self, interrupt_event, **kw):
        """Create the underlying KafkaConsumer and set up logging.

        :param interrupt_event: ``threading.Event``; when set, the consume
            loop closes the consumer and stops.
        :param kw: keyword arguments forwarded unchanged to ``KafkaConsumer``
            (bootstrap_servers, group_id, value_deserializer, ...).
        """
        self.consumer = KafkaConsumer(**kw)
        self.handlers = {}  # topic name -> list of handler callables
        self.interrupt_event = interrupt_event
        logger = logging.getLogger('flask-kafka-consumer')
        # The logger is shared by name across instances; attach the stream
        # handler only once. Previously every FlaskKafka created (bus, bus2,
        # ...) added another handler, so each log line was printed N times.
        if not logger.handlers:
            ch = logging.StreamHandler(sys.stdout)
            ch.setLevel(logging.INFO)
            formatter = logging.Formatter('[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s')
            ch.setFormatter(formatter)
            logger.addHandler(ch)
        logger.setLevel(logging.INFO)
        self.logger = logger

    def _add_handler(self, topic, handler):
        """Append *handler* to the list registered for *topic*."""
        self.handlers.setdefault(topic, []).append(handler)

    def handle(self, topic):
        """Decorator: register the decorated function as a handler for *topic*."""
        def decorator(f):
            self._add_handler(topic, f)
            return f

        return decorator

    def _run_handlers(self, msg):
        """Invoke every handler registered for ``msg.topic``.

        If any handler raises, the error is logged, the consumer is closed
        and the consuming thread exits.
        """
        try:
            for handler in self.handlers[msg.topic]:
                handler(msg)
        except Exception as e:
            self.logger.critical(str(e), exc_info=True)
            self.consumer.close()
            # SystemExit raised here terminates only the consumer thread.
            sys.exit("Exited due to exception")

    def signal_term_handler(self, signal, frame):
        """Signal-handler entry point; kept for backward compatibility.

        Previously duplicated interrupted_process verbatim — now delegates.
        """
        self.interrupted_process(signal, frame)

    def _start(self):
        """Consume loop: subscribe to all registered topics and dispatch."""
        self.consumer.subscribe(topics=tuple(self.handlers.keys()))
        self.logger.info("starting consumer...registered signterm")

        for msg in self.consumer:
            self.logger.debug("TOPIC: {}, PAYLOAD: {}".format(msg.topic, msg.value))
            self._run_handlers(msg)
            # Cooperative shutdown: stop once the interrupt event is set.
            if self.interrupt_event.is_set():
                self.interrupted_process()
                self.interrupt_event.clear()

    def interrupted_process(self, *args):
        """Close the consumer and exit the calling thread (or process)."""
        self.logger.info("closing consumer")
        self.consumer.close()
        sys.exit(0)

    def _run(self):
        """Start the consume loop on a background (non-daemon) thread."""
        self.logger.info(" * The flask Kafka application is consuming")
        t = threading.Thread(target=self._start)
        t.start()

    # run the consumer application
    def run(self):
        """Public entry point: begin consuming in the background."""
        self._run()


# Shared event used to ask the consumer threads to shut down.
INTERRUPT_EVENT = Event()


def _decode_json_value(raw):
    """Deserialize a Kafka message payload from UTF-8 JSON bytes."""
    return json.loads(raw.decode('utf-8'))


# Connection settings common to every consumer instance.
_COMMON_KAFKA_KW = dict(
    bootstrap_servers=",".join(app.config['KAFKA_SERVERS']),
    value_deserializer=_decode_json_value,
    auto_offset_reset='latest',
)

# One consumer per group: bus consumes 'syn_data_point', bus2 'syn_data'.
bus = FlaskKafka(INTERRUPT_EVENT, group_id="qt-kafka3", **_COMMON_KAFKA_KW)

bus2 = FlaskKafka(INTERRUPT_EVENT, group_id="qt-kafka4", **_COMMON_KAFKA_KW)

# bus3 = FlaskKafka(INTERRUPT_EVENT, group_id="qt-kafka5",
#                   max_poll_interval_ms=10 * 60 * 1000,
#                   **_COMMON_KAFKA_KW)



def listen_kill_server():
    """Install one shutdown handler for SIGTERM/SIGINT/SIGQUIT/SIGHUP.

    Bug fixed: the original registered ``bus.interrupted_process`` and then
    immediately re-registered ``bus2.interrupted_process`` for the same
    signals; ``signal.signal`` keeps only the LAST handler, so bus's consumer
    was never closed on shutdown. A single combined handler now closes BOTH
    consumers before exiting.
    """
    def _shutdown(signum, frame):
        for b in (bus, bus2):
            try:
                b.logger.info("closing consumer")
                b.consumer.close()
            except Exception as e:
                # Best effort: keep going so the other consumer is closed too.
                logging.error("error closing consumer: " + str(e), exc_info=True)
        sys.exit(0)

    for sig in (signal.SIGTERM, signal.SIGINT, signal.SIGQUIT, signal.SIGHUP):
        signal.signal(sig, _shutdown)



from datetime import datetime


@bus2.handle('syn_data')
def test_topic_handler(msg):
    """Dump gaolu data for the [beginTime, endTime] window in the message.

    The payload is expected to carry ``beginTime``/``endTime`` strings in
    ``%Y-%m-%d %H:%M:%S`` format; the dump runs inside the Flask app context.
    Any failure is logged and swallowed so the consumer loop keeps running.
    """
    try:
        start = datetime.strptime(msg.value.get("beginTime"), "%Y-%m-%d %H:%M:%S")
        end = datetime.strptime(msg.value.get("endTime"), "%Y-%m-%d %H:%M:%S")
        with app.app_context():
            gaolu_dao_service.dump_gaolu_data(start, end)
    except Exception as e:
        # Fix: removed the stray trailing backslash that line-continued this
        # statement into the blank line below it.
        logging.error('error while dump gaolu data: ' + str(e), exc_info=True)

@bus.handle('syn_data_point')
def test_topic_handler2(msg):
    """Cache each incoming data point in redis under its tag name.

    The payload carries ``tagName``, ``value`` and ``dateTime``; depending on
    which column set the tag belongs to, the value and its update time are
    written into the matching redis hashes. Failures are logged and swallowed
    so the consumer loop keeps running.
    """
    try:
        timestamp = msg.timestamp / 1000  # Kafka timestamps are milliseconds
        # Throttle logging: only messages whose epoch second is a multiple
        # of 5 are logged.
        if timestamp % 5 == 0:
            logging.info("receive data point msg:" + json.dumps(msg.value))

        # NOTE(review): the consumer's value_deserializer already json-decodes
        # the payload, so this second loads implies msg.value is a
        # double-encoded JSON string — confirm against the producer.
        content = json.loads(msg.value)
        tag_name = content['tagName']

        msg_date = datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
        redis.hset("update", tag_name, msg_date)  # per-tag last update time
        redis.set("latestUpdate", msg_date)  # most recent update over all tags
        if tag_name in shaojie_column:
            redis.hset("shaojie", tag_name, content.get('value', 0))
            redis.hset("shaojie_update", tag_name, msg_date)

        if tag_name in lugang_column:
            redis.hset("lugang", tag_name, content.get('value', 0))
            redis.hset("lugang_update", tag_name, content.get('dateTime'))

        if tag_name in lugang_qinshi_column:
            redis.hset("lugang_qinshi", tag_name, content.get('value', 0))
            redis.hset("lugang_qinshi_update", tag_name, content.get('dateTime'))

        if tag_name in lugang_activiness_long_column:
            redis.hset("lugang_activiness_long", tag_name, content.get('value', 0))
            redis.hset("lugang_activiness_long_update", tag_name, content.get('dateTime'))

        if tag_name in lugang_activiness_short_column:
            redis.hset("lugang_activiness_short", tag_name, content.get('value', 0))
            redis.hset("lugang_activiness_short_update", tag_name, content.get('dateTime'))

        if tag_name in gaolu_column:
            redis.hset("gaolu", tag_name, content.get('value', 0))
            redis.hset("gaolu_update", tag_name, msg_date)
            # gaolu data expires after 24 hours. Fix: redis.expire takes
            # SECONDS; the previous 24 * 60 * 60 * 1000 (milliseconds-style
            # value) kept the key for ~1000 days instead of one day.
            redis.expire("gaolu", 24 * 60 * 60)

        # Mirror the *ZXFX analyser tags into their *ZXZF equivalents.
        if tag_name == 'CG_LT_GL_GL04_H2ZXFX':
            tag_name = 'CG_LT_GL_GL04_H2ZXZF'
            redis.hset("gaolu", tag_name, content.get('value', 0))
        if tag_name == 'CG_LT_GL_GL04_N2ZXFX':
            tag_name = 'CG_LT_GL_GL04_N2ZXZF'
            redis.hset("gaolu", tag_name, content.get('value', 0))

        # set hanfan data into redis
        if tag_name in hanfan_column:
            redis.hset("hanfan", tag_name, content.get('value', 0))
            redis.hset("hanfan_update", tag_name, msg_date)
        # NOTE(review): tag_name has already been renamed to *ZXZF by the
        # gaolu remap above, so these two branches can never match — dead
        # code that presumably intended to mirror the value into "hanfan".
        # Kept as-is to preserve behavior; confirm intent before removing.
        if tag_name == 'CG_LT_GL_GL04_H2ZXFX':
            tag_name = 'CG_LT_GL_GL04_H2ZXZF'
            redis.hset("hanfan", tag_name, content.get('value', 0))
        if tag_name == 'CG_LT_GL_GL04_N2ZXFX':
            tag_name = 'CG_LT_GL_GL04_N2ZXZF'
            redis.hset("hanfan", tag_name, content.get('value', 0))
    except Exception as e:
        logging.error('error while dump data of topic syn_data_point: ' + str(e), exc_info=True)

# Install the termination-signal handlers, then start each consumer on its
# own background thread.
listen_kill_server()
for _consumer_bus in (bus, bus2):
    _consumer_bus.run()
# bus3.run()
