#!/usr/bin/env python
# -*- coding:utf-8 _*-
""" 
@author:robot
@file: mq02_rabbitmq.py 
@version:
@time: 2022/03/07 
"""
import _thread
import json
import time
import pika
from TapClientConnector.connector.tap_connector00_base import *

pk = 'pk'
mq_split = '<<<>>>'


class TapRabbitMQConnector(TAPConnectorBase):
    """RabbitMQ test connector.

    Publishes insert/update/delete records to a queue (``_exec``) and
    consumes a queue for ``self.timeout`` seconds, replaying the consumed
    operations into an in-memory list of rows (``_query``).
    """

    # Class-level defaults kept for backward compatibility with code that
    # reads them off the class; per-instance copies are created in __init__
    # so instances no longer share these mutable containers.
    rabbit_data = {}
    _rabbit_raw_data = []
    rabbit_json_data = []
    pdk = None

    def __init__(self, kwargs):
        """Open a blocking connection to RabbitMQ.

        :param kwargs: connection settings dict — ``database_host``,
            ``database_port``, ``mqUserName``, ``mqPassword``,
            ``virtualHost`` and an optional ``pdk`` flag that switches the
            message header / payload key names to the PDK protocol.
        """
        super(TapRabbitMQConnector, self).__init__(kwargs)
        self.host = kwargs.get('database_host')
        self.port = kwargs.get('database_port')
        self.mqUserName = kwargs.get('mqUserName')
        self.mqPassword = kwargs.get('mqPassword')
        self.vhost = kwargs.get('virtualHost')
        self.pdk = kwargs.get('pdk')
        # Legacy protocol key names; overridden below when pdk is set.
        self.op = 'TAPDATA_SYNC_OP'
        self.sync_data = 'TAPDATA_SYNC_DATA'
        self.op_insert = 'INSERT'
        self.op_update = 'UPDATE'
        self.op_delete = 'DELETE'
        if self.pdk:
            self.op = 'mqOp'
            self.sync_data = 'sync_data'
            self.op_insert = 'insert'
            self.op_update = 'update'
            self.op_delete = 'delete'
        # Fix: give every instance its own mutable state instead of mutating
        # the shared class-level containers above.
        self.rabbit_data = {}
        self._rabbit_raw_data = []
        self.rabbit_json_data = []
        self.timeout = 3600  # consume window, in seconds
        credentials = pika.PlainCredentials(self.mqUserName, self.mqPassword)
        self.connection = pika.BlockingConnection(
            pika.ConnectionParameters(host=self.host,
                                      port=self.port,
                                      virtual_host=self.vhost,
                                      credentials=credentials,
                                      heartbeat=0), )

    def __rabbit_transform(self, queue, pk=pk):
        """Replay the raw messages of *queue* into ``self.rabbit_json_data``.

        Inserts append the record; updates replace the record with the same
        primary key; deletes remove it. Records without the preset ``pk``
        key only produce a warning (the merged result is then unreliable).
        """
        self.rabbit_json_data = []
        for column in self.rabbit_data[queue]:
            op = column.get(self.op)
            data = column.get(self.sync_data)
            if op == self.op_insert:
                self.rabbit_json_data.append(data)
            elif op == self.op_update:
                # Fix: iterate over a snapshot — the original removed from
                # and appended to the list while iterating it, which could
                # re-visit the freshly appended record.
                for old in list(self.rabbit_json_data):
                    try:
                        if old[pk] == data[pk]:
                            self.rabbit_json_data.remove(old)
                            self.rabbit_json_data.append(data)
                    except (KeyError, TypeError):
                        # fix: narrowed the bare except; message said "kafka"
                        logger.warning('rabbitmq 数据没有pk预设主键，查询整理结果无效')
            elif op == self.op_delete:
                for old in list(self.rabbit_json_data):
                    try:
                        if old[pk] == data[pk]:
                            self.rabbit_json_data.remove(old)
                    except (KeyError, TypeError):
                        logger.warning('rabbitmq 数据没有pk预设主键，查询整理结果无效')

    def __rabbit_query(self, table):
        """Consume *table* on a background thread for ``self.timeout`` seconds.

        The consumer callback appends each message (op header + decoded
        payload) to ``self._rabbit_raw_data``; this method simply blocks the
        calling thread for the consume window and then returns. The consumer
        thread is left running (pika's blocking channel is not safe to stop
        from another thread) and dies with the process.
        """
        channel = self.connection.channel()
        logger.info(f'rabbit query {table} queue')
        channel.queue_declare(queue=table, durable=True)

        def close(channel):
            # Block the caller for the consume window (seconds).
            for i in range(self.timeout + 1):
                time.sleep(1)

        # Callback invoked for every delivered message.
        def callback(ch, method, properties, body):
            logger.info(properties.headers)
            logger.info(body)
            message = {
                self.op: properties.headers.get(self.op),
                self.sync_data: json.loads(body)
            }
            self._rabbit_raw_data.append(message)

        # Consumer loop, run on a worker thread so the wait isn't blocked.
        def start():
            channel.basic_consume(
                queue=table,
                on_message_callback=callback,
                auto_ack=True)
            channel.start_consuming()

        try:
            _thread.start_new_thread(start, ())
        except Exception:
            logger.fatal('thread error')
        close(channel)

    def __rabbit_delete(self, table):
        """Drain *table*, discarding every message.

        Uses a dedicated connection (the shared one may be busy consuming on
        another thread) and closes it when done — the original leaked it.
        """
        credentials = pika.PlainCredentials(self.mqUserName, self.mqPassword)
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(host=self.host,
                                      port=self.port,
                                      virtual_host=self.vhost,
                                      credentials=credentials,
                                      heartbeat=0), )
        channel = connection.channel()
        # fix: the original log said "rabbit query" (copy-paste)
        logger.info(f'rabbit delete {table} queue')
        channel.queue_declare(queue=table, durable=True)

        def close(channel):
            # Wait out the consume window, nudging the consumer to stop.
            for i in range(self.timeout + 1):
                time.sleep(1)
                channel.stop_consuming()

        # Messages are acked automatically (auto_ack) and simply logged away.
        def callback(ch, method, properties, body):
            logger.info(properties.headers)
            logger.info(body)
            logger.info('delete some data')

        def start():
            channel.basic_consume(
                queue=table,
                on_message_callback=callback,
                auto_ack=True)
            channel.start_consuming()

        try:
            _thread.start_new_thread(start, ())
        except Exception:
            logger.fatal('thread error')
        close(channel)
        connection.close()  # fix: release the dedicated connection

    def _query(self, table):
        """Consume *table* and return its replayed rows.

        *table* may carry an embedded pk filter as
        ``"<queue><<<>>><pk-value>"``; in that case the single matching row
        (or ``[]``) is returned, otherwise the full row list.
        """
        where = None
        if mq_split in table:
            _table = table.split(mq_split)
            table = _table[0]
            where = _table[1]
        # Fix: reset BOTH buffers. The raw buffer used to accumulate across
        # calls, duplicating previously consumed rows into every result.
        self.rabbit_data[table] = []
        self._rabbit_raw_data = []
        self.__rabbit_query(table)
        if not self._rabbit_raw_data:
            # timeout counts seconds (the old message said "ms")
            logger.fatal(f'rabbitmq {table} consumer timeout {self.timeout} s')
        self.rabbit_data[table].extend(self._rabbit_raw_data)
        self.__rabbit_transform(queue=table)
        new_return = self.rabbit_json_data
        if where:
            # NOTE(review): *where* is the raw string split off the table
            # name, so pk values are compared as strings here — confirm
            # callers only filter on string pks.
            for one in new_return:
                if one[pk] == where:
                    return one
            return []
        return new_return

    # 消费queue，由于线程安全问题，重新开一个线程进行源数据清理
    def drop(self, table):
        """Consume and discard every message currently on *table*."""
        self.__rabbit_delete(table)

    def _exec(self, data):
        """Publish one message described by *data*.

        :param data: either a dict with ``operate`` ('insert' / 'update' /
            'delete'), ``queue`` and ``queue_data`` keys, or a bare queue
            name — in which case a demo record is inserted.
        """
        try:
            operate = data.get('operate')
            queue = data.get('queue')
            queue_data = data.get('queue_data')
        except AttributeError:  # fix: was a bare except; data is a queue name
            operate = 'insert'
            queue = data
            queue_data = {
                "pk": 1,
                "string": "string",
                "number": 1000,
                'boolean': True,
                'Array': [1, 2, 3],
                'object': {'test': 'test'}
            }
        channel = self.connection.channel()
        channel.queue_declare(queue=queue, durable=True)
        json_data = json.dumps(queue_data)
        bytes_data = bytes(json_data, encoding='utf-8')
        from pika.spec import BasicProperties
        # One publish path for the three ops instead of three copied branches.
        headers = {
            'insert': self.op_insert,
            'update': self.op_update,
            'delete': self.op_delete,
        }
        if operate in headers:
            channel.basic_publish(exchange='',
                                  routing_key=queue,
                                  body=bytes_data,
                                  properties=BasicProperties(
                                      headers={self.op: headers[operate]}
                                  ))

    @generate_logger
    def generate_select(self, table=None, where=None, order=''):
        """Build the ``_query`` argument, encoding an optional pk filter."""
        logger.info('consumer time is 5s')
        self.timeout = 5  # short consume window for reads (seconds)
        if table:
            self.CURRENT_TABLE = table
        if where:
            return f'{self.CURRENT_TABLE}{mq_split}{where.get(pk)}'
        return self.CURRENT_TABLE

    @generate_logger
    def generate_insert(self, insert, table=None):
        """Build the ``_exec`` payload for inserting record *insert*."""
        if table:
            self.CURRENT_TABLE = table
        return {
            'operate': 'insert',
            'queue_data': insert,
            'queue': self.CURRENT_TABLE
        }

    @generate_logger
    def generate_update(self, update, where, table=None):
        """Build the ``_exec`` payload for an update.

        Fix: the original put the queue name under ``queue_data`` and had no
        ``queue`` key, so ``_exec`` declared ``queue=None`` and published the
        table name as the record body.
        """
        if table:
            self.CURRENT_TABLE = table
        return {
            'operate': 'update',
            'data': (where, update),  # kept for backward compatibility
            'queue_data': {**where, **update},
            'queue': self.CURRENT_TABLE
        }

    @generate_logger  # fix: every sibling builder is decorated; this one wasn't
    def generate_delete(self, where, table=None):
        """Build the ``_exec`` payload for a delete matching *where*."""
        if table:
            self.CURRENT_TABLE = table
        return {
            'operate': 'delete',
            'data': where,  # kept for backward compatibility
            'queue_data': where,
            'queue': self.CURRENT_TABLE
        }

    @generate_logger
    def generate_create(self, table_column, table=None):
        """RabbitMQ has no DDL: creating a 'table' is just a first insert."""
        if table:
            self.CURRENT_TABLE = table
        return self.generate_insert(table_column, table=self.CURRENT_TABLE)

    @generate_logger
    def generate_drop(self, table=None):
        """Build the payload for dropping (draining) the current queue."""
        if table:
            self.CURRENT_TABLE = table
        return {
            'operate': 'drop',
            'queue_data': [self.CURRENT_TABLE],
            # fix: without a 'queue' key, _exec would declare queue=None
            'queue': self.CURRENT_TABLE
        }


if __name__ == '__main__':
    # Manual smoke test: publish one demo record to a hard-coded test queue.
    from TapDataSourceInfo.db_info_pdk import RabbitMQ

    T = TapRabbitMQConnector(RabbitMQ.AUTO_RABBITMQ_TARGET)
    T.CURRENT_TABLE = 'demoB_RABBITMQ1658982274402217797'
    T.timeout = 2  # shorten the consume window for the manual run (seconds)
    create_string = T.generate_create({'pk': 1, 'n': 1, 'title': 'title'})
    # NOTE(review): exec() is not defined in this file — presumably a wrapper
    # around _exec() provided by TAPConnectorBase; confirm in the base class.
    T.exec(create_string)
