package com.atguigu.realtime.clients;

import com.alibaba.fastjson.JSONObject;
import com.alibaba.otter.canal.client.CanalConnector;
import com.alibaba.otter.canal.client.CanalConnectors;
import com.alibaba.otter.canal.protocol.CanalEntry;
import com.alibaba.otter.canal.protocol.Message;
import com.atguigu.realtime.constants.TopicConstant;
import com.atguigu.realtime.utils.KafkaClientUtil;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;

import java.net.InetSocketAddress;
import java.util.List;
import java.util.Random;

/**
 * Created by Smexy on 2022/5/25
 *
 *      采集 order_info表中的数据，写入Kafka
 *
 *      ①先创建一个客户端对象CanalConnector
 *
 * ②使用客户端对象连接 Canal server端
 *
 * ③订阅表
 *
 * ④解析订阅到的数据
 *
 * ⑤将数据写入kafka
 */
public class MyCanalClient2 {

    /**
     * Polls MySQL binlog changes from a Canal server and forwards the
     * after-image of each changed row to Kafka as JSON.
     *
     * Flow: ① build a {@link CanalConnector} client → ② connect to the Canal
     * server → ③ subscribe to tables → ④ poll and parse batches in an endless
     * loop → ⑤ route matching rows to Kafka topics.
     *
     * @param args unused
     * @throws InterruptedException           if the backoff sleep is interrupted
     * @throws InvalidProtocolBufferException if a store value cannot be deserialized
     */
    public static void main(String[] args) throws InterruptedException, InvalidProtocolBufferException {

        /*
            ① Create the client object (CanalConnector).

            SocketAddress address: the Canal server host/port, taken from
                        conf/canal.properties:
                        canal.ip = hadoop103
                        canal.port = 11111

            String destination: the directory name holding the subscribed MySQL
                        instance's instance.properties, per conf/canal.properties:
                        canal.destinations = example

            String username / password: credentials for the Canal SERVER itself
                        (not the canal@canal pair in instance.properties, which
                        Canal uses to connect to MySQL). Unset here.
         */
        CanalConnector canalConnector = CanalConnectors.newSingleConnector(
                new InetSocketAddress("hadoop103", 11111), "example", null, null);

        // ② Connect to the Canal server.
        canalConnector.connect();

        // ③ Subscribe. Filter format: database.table ('.*' = every table in db 211127).
        canalConnector.subscribe("211127.*");

        // ④ Poll batches forever.
        while (true) {

            // Fetch (and auto-ack) a batch of up to 100 entries.
            Message message = canalConnector.get(100);

            List<CanalEntry.Entry> entries = message.getEntries();

            // FIX: a batch may come back with a valid id but zero entries.
            // Checking only getId() == -1 misses that case and busy-spins;
            // treat "no batch" and "empty batch" alike and back off.
            if (message.getId() == -1 || entries.isEmpty()) {

                System.out.println("当前没有数据产生，歇会再去拉取");

                Thread.sleep(5000);

                continue;
            }

            for (CanalEntry.Entry entry : entries) {

                // Only ROWDATA entries carry insert/update/delete row images;
                // skip TRANSACTIONBEGIN/TRANSACTIONEND bookkeeping entries.
                // Enum comparison via == (null-safe, idiomatic).
                if (entry.getEntryType() == CanalEntry.EntryType.ROWDATA) {

                    parseEntry(entry.getStoreValue(), entry.getHeader().getTableName());
                }
            }
        }
    }

    /**
     * Deserializes a ROWDATA entry and routes it to the Kafka topic matching
     * its table and event type:
     * order_info → INSERT only; order_detail → INSERT only;
     * user_info → INSERT and UPDATE. Everything else is ignored.
     *
     * @param storeValue serialized RowChange payload of the entry
     * @param tableName  source table name from the entry header
     * @throws InvalidProtocolBufferException if the payload is not a valid RowChange
     */
    private static void parseEntry(ByteString storeValue, String tableName) throws InvalidProtocolBufferException {

        // Deserialize the protobuf payload.
        CanalEntry.RowChange rowChange = CanalEntry.RowChange.parseFrom(storeValue);

        // Hoisted: the event type was previously re-read for every branch.
        CanalEntry.EventType eventType = rowChange.getEventType();

        // Constant-first equals() so a null tableName cannot NPE.
        if ("order_info".equals(tableName) && eventType == CanalEntry.EventType.INSERT) {

            sendData(rowChange, TopicConstant.ORDER_INFO);

        } else if ("order_detail".equals(tableName) && eventType == CanalEntry.EventType.INSERT) {

            sendData(rowChange, TopicConstant.ORDER_DETAIL);

        } else if ("user_info".equals(tableName)
                && (eventType == CanalEntry.EventType.INSERT || eventType == CanalEntry.EventType.UPDATE)) {

            sendData(rowChange, TopicConstant.USER_INFO);
        }
    }

    /**
     * Serializes each changed row's after-image (column name → value) to a
     * JSON string and sends it to the given Kafka topic.
     *
     * FIX: removed the per-row {@code new Random().nextInt(5)} — the value was
     * never used (the simulated delay it fed was commented out), so it only
     * allocated a Random object per row for nothing.
     *
     * @param rowChange deserialized row change containing one or more rows
     * @param topic     destination Kafka topic
     */
    private static void sendData(CanalEntry.RowChange rowChange, String topic) {

        for (CanalEntry.RowData rowData : rowChange.getRowDatasList()) {

            // Wrap one row's after-image in a JSONObject.
            JSONObject jsonObject = new JSONObject();

            for (CanalEntry.Column column : rowData.getAfterColumnsList()) {
                jsonObject.put(column.getName(), column.getValue());
            }

            // ⑤ Write to Kafka.
            KafkaClientUtil.sendDataToKafka(topic, jsonObject.toJSONString());
        }
    }
}
