import datetime
import json
from pymysqlreplication import BinLogStreamReader
from pymysqlreplication.row_event import (
    DeleteRowsEvent,
    UpdateRowsEvent,
    WriteRowsEvent
)
from kafka import KafkaProducer
class DateEncoder(json.JSONEncoder):
    """JSON encoder that renders datetime/date values as plain strings.

    Works around: TypeError: Object of type 'datetime' is not JSON
    serializable — pass it to json.dumps via the ``cls=`` argument.
    """

    def default(self, obj):
        # Test datetime first: datetime.datetime is a subclass of
        # datetime.date, so the order of these checks matters.
        if isinstance(obj, datetime.datetime):
            return obj.strftime('%Y-%m-%d %H:%M:%S')
        if isinstance(obj, datetime.date):
            return obj.strftime("%Y-%m-%d")
        # Anything else: defer to the base class (raises TypeError).
        return super().default(obj)


# MySQL connection settings for the binlog replication client.
# NOTE(review): credentials are hard-coded — consider moving them to
# environment variables or a config file before deploying.
mysql_settings = {
    'host': '127.0.0.1',
    'port': 3306,
    'user': 'root',
    'passwd': '123456'
}


def main():
    """Stream MySQL binlog row events and forward each row's values to Kafka.

    Rows whose values contain an 'order_id' field are sent to topic
    "test4"; all other rows go to topic "test5". Blocks forever waiting
    for new binlog events.
    """
    # Binlog stream reader: connects as a replication slave.
    stream = BinLogStreamReader(
        connection_settings=mysql_settings,
        server_id=100,       # slave identifier — must be unique
        blocking=True,       # block and wait for subsequent events
        resume_stream=True,  # True: read from the latest position (default False)
        # Only watch row-level write operations: insert, update, delete.
        only_events=[
            DeleteRowsEvent,
            UpdateRowsEvent,
            WriteRowsEvent
        ]
    )
    # BUG FIX: pass cls=DateEncoder so datetime/date column values
    # (e.g. update_time) serialize instead of raising
    # "TypeError: Object of type 'datetime' is not JSON serializable".
    producer = KafkaProducer(
        bootstrap_servers='192.168.174.141:9092,192.168.174.142:9092,192.168.174.143:9092',
        value_serializer=lambda v: json.dumps(
            v, cls=DateEncoder, ensure_ascii=False).encode('utf-8'))
    try:
        for event in stream:
            for row in event.rows:
                # Route by presence of the 'order_id' column.
                topic = "test4" if 'order_id' in row['values'] else "test5"
                producer.send(topic, row['values'])
    finally:
        # Release the replication connection and flush any buffered
        # messages before exiting (e.g. on KeyboardInterrupt).
        stream.close()
        producer.flush()
        producer.close()



# Script entry point: run the binlog-to-Kafka forwarder (blocks forever).
if __name__ == '__main__':
    main()
# The bare string below is documentation only — it is never read at
# runtime. NOTE(review): the code sends only j['values'], i.e. just the
# inner "data" portion shown here; the schema/table/action wrapper
# described below is not currently produced — confirm intended format.
"""
输出数据格式
{
    "schema": "demo",    # 数据库名
    "table": "student",  # 表名
    "action": "update",  # 动作 insert、delete、update
    "data": {            # 数据，里边包含所有字段
        "id": 26, 
        "name": "haha", 
        "age": 34, 
        "update_time": "2019-06-06 16:59:06", 
        "display": 0
    }
}
"""
