package com.atguigu.realtime.clients;

import com.alibaba.fastjson.JSONObject;
import com.alibaba.otter.canal.client.CanalConnector;
import com.alibaba.otter.canal.client.CanalConnectors;
import com.alibaba.otter.canal.protocol.CanalEntry;
import com.alibaba.otter.canal.protocol.Message;
import com.atguigu.realtime.constants.TopicConstant;
import com.atguigu.realtime.utils.KafkaClientUtil;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;

import java.net.InetSocketAddress;
import java.util.List;

/**
 * Created by Smexy on 2022/5/25
 *
 *      采集 order_info 表中的数据，写入 Kafka
 *
 *      ①先创建一个客户端对象CanalConnector
 *
 * ②使用客户端对象连接 Canal server端
 *
 * ③订阅表
 *
 * ④解析订阅到的数据
 *
 * ⑤将数据写入kafka
 */
public class MyCanalClient {

    /** Max number of entries to pull from the Canal server per poll. */
    private static final int BATCH_SIZE = 100;
    /** Back-off before polling again when the server returned an empty batch. */
    private static final long IDLE_SLEEP_MS = 5000;

    /**
     * Connects to the Canal server, subscribes to the {@code 211127.order_info}
     * table, and loops forever pulling binlog batches. Each ROWDATA entry is
     * parsed and its INSERT rows are forwarded to Kafka as JSON.
     *
     * Connection parameters (host, port, destination) mirror the server's
     * conf/canal.properties: canal.ip = hadoop103, canal.port = 11111,
     * canal.destinations = example. The username/password here authenticate
     * against the Canal server itself, NOT MySQL (the MySQL credentials live
     * in instance.properties) — both are unset on this server, hence null.
     *
     * @throws InvalidProtocolBufferException if a store value cannot be
     *         deserialized into a {@link CanalEntry.RowChange}
     */
    public static void main(String[] args) throws InvalidProtocolBufferException {

        // ① Create the client object.
        CanalConnector canalConnector = CanalConnectors.newSingleConnector(
                new InetSocketAddress("hadoop103", 11111), "example", null, null);

        try {
            // ② Connect to the Canal server.
            canalConnector.connect();

            // ③ Subscribe, filter format: database.table
            canalConnector.subscribe("211127.order_info");

            // ④ Pull data in an endless poll loop.
            while (true) {

                Message message = canalConnector.get(BATCH_SIZE);

                // Empty batch: Canal signals "nothing new" either with id == -1
                // or with a valid id but no entries — handle both, otherwise we
                // busy-spin on empty batches.
                if (message.getId() == -1 || message.getEntries().isEmpty()) {
                    System.out.println("当前没有数据产生，歇会再去拉取");
                    try {
                        Thread.sleep(IDLE_SLEEP_MS);
                    } catch (InterruptedException e) {
                        // Restore the interrupt flag and stop polling cleanly.
                        Thread.currentThread().interrupt();
                        break;
                    }
                    continue;
                }

                for (CanalEntry.Entry entry : message.getEntries()) {
                    // Only ROWDATA entries carry row changes (insert|update|delete);
                    // skip TRANSACTIONBEGIN/TRANSACTIONEND bookkeeping entries.
                    if (entry.getEntryType() == CanalEntry.EntryType.ROWDATA) {
                        parseEntry(entry.getStoreValue());
                    }
                }
            }
        } finally {
            // Release the server-side connection even on abnormal exit.
            canalConnector.disconnect();
        }
    }

    /**
     * Deserializes one entry's store value and, for INSERT events only,
     * converts each changed row's after-image columns into a JSON object and
     * sends it to the {@code ORDER_INFO} Kafka topic.
     *
     * @param storeValue protobuf-serialized {@link CanalEntry.RowChange}
     * @throws InvalidProtocolBufferException if deserialization fails
     */
    private static void parseEntry(ByteString storeValue) throws InvalidProtocolBufferException {

        // Deserialize the protobuf payload into a RowChange (one SQL statement).
        CanalEntry.RowChange rowChange = CanalEntry.RowChange.parseFrom(storeValue);

        // Only new orders are of interest; ignore UPDATE/DELETE events.
        if (rowChange.getEventType() != CanalEntry.EventType.INSERT) {
            return;
        }

        for (CanalEntry.RowData rowData : rowChange.getRowDatasList()) {

            // Wrap one row (its after-image, i.e. post-insert values) in a JSONObject.
            JSONObject jsonObject = new JSONObject();
            for (CanalEntry.Column column : rowData.getAfterColumnsList()) {
                jsonObject.put(column.getName(), column.getValue());
            }

            // ⑤ Write the row to Kafka.
            KafkaClientUtil.sendDataToKafka(TopicConstant.ORDER_INFO, jsonObject.toJSONString());
        }
    }
}
