#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" 
@author:robot
@file: tap_connector12_kafka.py
@version:
@time: 2022/01/13 
"""
from pprint import pprint

from kafka.errors import UnknownTopicOrPartitionError
from kafka import KafkaProducer, KafkaConsumer, KafkaClient, KafkaAdminClient, TopicPartition
import json
from TapClientConnector.connector.tap_connector00_base import *

# new = {'op': 'i','after': {'test':1}}
# data = {'self.op': 'self.op_insert',
#         'self.sync_data': {"string": "string", "number": 1000, 'boolean': True, 'Array': [1, 2, 3],
#                               'object': {'test': 'test'}}}
#
#
# json_data = json.dumps(data)
# bytes_data = bytes(json_data, encoding='utf-8')
# producer = KafkaProducer(bootstrap_servers=["192.168.1.183:9092"])
# producer.send('kafka2', bytes_data)
# producer.close()

pk = 'pk'
kafka_split = '<<<>>>'


class TAPKafkaConnector(TAPConnectorBase):
    """Kafka connector.

    Writes CDC-style messages (insert / update / delete, flagged via message
    headers) to a topic, and reads a topic back by replaying its messages in
    order to reconstruct the current row set.

    Message-format modes, selected in __init__:
      * default       — header key 'self.op', payload key 'self.sync_data'
      * pdk mode      — header key 'mqOp',    payload key 'sync_data'
      * kafka_struct  — header key 'op',      payload key 'after' (Debezium-like)
    """

    def __init__(self, kwargs):
        # NOTE: kwargs is a plain dict of connection settings, not **kwargs.
        self.pdk = kwargs.get('pdk')
        # Default header/payload key names and op flag values; possibly
        # overridden below depending on the configured message format.
        self.op = 'self.op'
        self.sync_data = 'self.sync_data'
        self.op_insert = 'self.op_insert'
        self.op_update = 'self.op_update'
        self.op_delete = 'self.op_delete'

        if self.pdk:
            self.op = 'mqOp'
            self.sync_data = 'sync_data'
            self.op_insert = 'insert'
            self.op_update = 'update'
            self.op_delete = 'delete'
        super().__init__(kwargs)
        kafka_topic = self.DB_INFO.get('kafkaPatternTopics')
        kafkaBootstrapServers = self.DB_INFO.get('kafkaBootstrapServers')
        self.kafka_user = self.FORMAT_DB_INFO.get('user')
        self.kafka_password = self.FORMAT_DB_INFO.get('password')
        self.kafka_struct = self.FORMAT_DB_INFO.get('new')
        if self.kafka_struct:
            # Debezium-style single-letter op flags, payload under 'after'.
            self.op = 'op'
            self.sync_data = 'after'
            self.op_insert = 'i'
            self.op_update = 'u'
            self.op_delete = 'd'
        self.kb_list = kafkaBootstrapServers.split(',')
        self.consumer_start_offset = 0
        self.consumer_end_offset = 100000
        self.partition_sit = 0  # single-partition assumption — TODO confirm
        self.kafka_data = {}        # topic -> raw consumed messages
        self.kafka_json_data = []   # replayed (materialized) rows
        # Thin namespace object so the kafka client classes can be swapped out.
        self.connector = type('TypeClientKafka',
                              (object,),
                              dict(tapkafkaAdminClient=KafkaAdminClient,
                                   tapkafkaClient=KafkaClient,
                                   tapkafkaProducer=KafkaProducer,
                                   tapkafkaConsumer=KafkaConsumer))

    def __kafka_transform(self, topic, pk=pk):
        """Replay the consumed messages of *topic* into self.kafka_json_data.

        Inserts append the payload; updates/deletes locate the existing row by
        the *pk* field and replace/remove it.  Rows without the pk field are
        logged and left untouched.
        """
        for column in self.kafka_data[topic]:
            op = column[self.op]
            data = column.get(self.sync_data)
            if op == self.op_insert:
                self.kafka_json_data.append(data)
            elif op == self.op_update:
                # Iterate over a snapshot: we mutate kafka_json_data below and
                # removing from the list being iterated would skip elements.
                for old in list(self.kafka_json_data):
                    try:
                        if old[pk] == data[pk]:
                            self.kafka_json_data.remove(old)
                            self.kafka_json_data.append(data)
                    except (KeyError, TypeError):
                        logger.warning('kafka 数据没有pk预设主键，查询整理结果无效')
            elif op == self.op_delete:
                for old in list(self.kafka_json_data):
                    try:
                        if old[pk] == data[pk]:
                            self.kafka_json_data.remove(old)
                    except (KeyError, TypeError):
                        logger.warning('kafka 数据没有pk预设主键，查询整理结果无效')

    @staticmethod
    def __kafka_consumer(consumer, msg_count):
        """Drain up to *msg_count* messages from *consumer*.

        Returns a list of dicts: the JSON-decoded payload under 'sync_data'
        merged with the message's decoded header key/values (which carry the
        op flag).  Messages with an empty/None value are skipped.
        """
        this_msg = []
        for msg in consumer:
            if not msg.value:
                # Tombstone / empty message: decoding None would raise.
                continue
            headers = {key: value.decode() for key, value in msg.headers}
            record = {
                'sync_data': json.loads(msg.value.decode()),
                **headers
            }
            this_msg.append(record)
            if len(this_msg) >= msg_count:
                consumer.close()
                break
        return this_msg

    def __kafka_find(self, topic, msg_count, timeout):
        """Assign partition self.partition_sit of *topic*, seek to the start
        offset and consume up to *msg_count* messages (or until *timeout* ms
        with no new message)."""
        consumer: KafkaConsumer = self.connector.tapkafkaConsumer(
            bootstrap_servers=self.kb_list,
            consumer_timeout_ms=timeout,
            group_id='test',
        )
        part = TopicPartition(topic, self.partition_sit)
        consumer.assign([part])
        consumer.seek(part, self.consumer_start_offset)
        _msg = self.__kafka_consumer(consumer, msg_count)
        return _msg

    def _query(self, table, msg_count=30, timeout=5000) -> list:
        """Consume up to *msg_count* messages from topic *table* and return
        the replayed rows.

        *table* may carry an inline pk filter appended with kafka_split
        (see generate_select); with a filter the first matching row (a dict)
        is returned, or [] when nothing matches.
        """
        where = None
        if kafka_split in table:
            _table = table.split(kafka_split)
            table = _table[0]
            where = _table[1]
        self.kafka_data[f'{table}'] = []
        res = self.__kafka_find(table, msg_count, timeout=timeout)
        if not res:
            logger.fatal(f'kafka topic {table} consumer timeout {timeout} ms')
        self.kafka_data[f'{table}'].extend(res)
        self.__kafka_transform(topic=table)
        logger.info(f'kafka topic {table} the {msg_count} times msg is {self.kafka_data[f"{table}"]}')
        res = self.kafka_json_data
        # Reset the accumulators so consecutive queries don't mix topics.
        self.kafka_json_data = []
        self.kafka_data = {}
        if where:
            for one in res:
                # NOTE(review): *where* is always a str (it comes from a
                # split); rows whose pk is numeric will never match — confirm.
                if one.get(pk) == where:
                    return one
            return []
        return res

    def __produce(self, topic, payload, op_flag):
        """Send one JSON-serialized message to *topic* with *op_flag* in the
        headers; the producer is always closed, even if send() raises."""
        producer = self.connector.tapkafkaProducer(
            bootstrap_servers=self.kb_list,
            value_serializer=lambda v: json.dumps(v).encode('utf-8')
        )
        try:
            producer.send(topic, payload, headers=[(self.op, bytes(op_flag, encoding='utf-8'))])
        finally:
            producer.close()

    @connector_exec_logger
    def _exec(self, data):
        """Execute a generated operation.

        *data* is a dict (or its JSON string form) with keys:
          'operate'    — 'drop' | 'insert' | 'update' | 'delete'
          'topic'      — topic name (list of names for 'drop')
          'topic_data' — message payload (optional for 'insert')

        Raises NotSupport for an unknown operate value; an unknown topic on
        drop is logged and swallowed.
        """
        if isinstance(data, str):
            data = json.loads(data)
        operate = data.get('operate')
        topic = data.get('topic')
        _topic_data = {}
        try:
            if operate == 'drop':
                kcadmin = self.connector.tapkafkaAdminClient(bootstrap_servers=self.kb_list)
                try:
                    res = kcadmin.delete_topics(topic, timeout_ms=5000)
                finally:
                    kcadmin.close()
                logger.info(f"kafka drop topic {topic} ")
                return res
            elif operate == 'insert':
                if data.get('topic_data'):
                    _topic_data.update(data.get('topic_data'))
                else:
                    # Default sample payload covering the basic JSON types.
                    _topic_data.update(
                        {"string": "string",
                         "number": 1000,
                         'boolean': True,
                         'Array': [1, 2, 3],
                         'object': {'test': 'test'}}
                    )
                self.__produce(topic, _topic_data, self.op_insert)
            elif operate == 'update':
                _topic_data = data.get('topic_data')
                self.__produce(topic, _topic_data, self.op_update)
            elif operate == 'delete':
                _topic_data = data.get('topic_data')
                self.__produce(topic, _topic_data, self.op_delete)
            else:
                raise NotSupport
            logger.info(f'{operate} {topic} {_topic_data}')
        except UnknownTopicOrPartitionError:
            logger.warning('UnknownTopicOrPartitionError')

    def drop(self, table):
        """Drop (delete) topic *table*."""
        self._exec(self.generate_drop(table))

    @connector_query_logger
    def show_tables(self):
        """Return the list of topic names known to the cluster."""
        kcadmin = self.connector.tapkafkaAdminClient(bootstrap_servers=self.kb_list)
        try:
            return [item['topic'] for item in kcadmin.describe_topics()]
        finally:
            kcadmin.close()

    @generate_logger
    def generate_drop(self, table=None):
        """Build the operation dict that drops *table* (or CURRENT_TABLE)."""
        if table:
            self.CURRENT_TABLE = table
        return {
            'operate': 'drop',
            'topic': [self.CURRENT_TABLE]
        }

    @generate_logger
    def generate_create(self, table_column, table=None):
        """Kafka has no explicit create: producing the first message creates
        the topic, so 'create' is expressed as an insert."""
        if table:
            self.CURRENT_TABLE = table
        return self.generate_insert(table_column, table=self.CURRENT_TABLE)

    @generate_logger
    def generate_select(self, table=None, where=None, order=''):
        """Build the _query() argument: the topic name, optionally with the
        pk filter value appended after kafka_split."""
        if table:
            self.CURRENT_TABLE = table
        if where:
            return f'{self.CURRENT_TABLE}{kafka_split}{where.get(pk)}'
        return self.CURRENT_TABLE

    @generate_logger
    def generate_insert(self, insert, table=None):
        """Build the operation dict that inserts payload *insert*."""
        if table:
            self.CURRENT_TABLE = table
        return {
            'operate': 'insert',
            'topic_data': insert,
            'topic': self.CURRENT_TABLE
        }

    @generate_logger
    def generate_delete(self, where, table=None):
        """Build the operation dict that deletes the row matching *where*."""
        if table:
            self.CURRENT_TABLE = table
        return {
            'operate': 'delete',
            'topic_data': where,
            'topic': self.CURRENT_TABLE
        }

    @generate_logger
    def generate_update(self, update, table=None):
        """Build the operation dict that updates the row with *update*."""
        if table:
            self.CURRENT_TABLE = table
        return {
            'operate': 'update',
            'topic_data': update,
            'topic': self.CURRENT_TABLE
        }
