package com.atguigu.app;

import com.alibaba.fastjson.JSONObject;
import com.alibaba.otter.canal.client.CanalConnector;
import com.alibaba.otter.canal.client.CanalConnectors;
import com.alibaba.otter.canal.protocol.CanalEntry;
import com.alibaba.otter.canal.protocol.Message;
import com.atguigu.constants.GmallConstants;
import com.atguigu.utils.MyKafkaSender;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;

import java.net.InetSocketAddress;
import java.util.List;

/**
 * @author Lec
 * @date 2022/7/18 17:48
 */

/**
 * Canal client that tails the MySQL binlog for the {@code gmall220309} database,
 * filters INSERTs on the {@code order_info} table, and forwards each new row to
 * Kafka (topic {@link GmallConstants#KAFKA_TOPIC_ORDER}) as a JSON string.
 */
public class CanalClient {
    public static void main(String[] args) throws InvalidProtocolBufferException {
        // 1. Build a connector to the Canal server instance "example" (no credentials).
        CanalConnector canalConnector = CanalConnectors.newSingleConnector(
                new InetSocketAddress("hadoop102", 11111), "example", "", "");

        // 2. Connect and subscribe ONCE, before the poll loop.
        //    The original code re-connected and re-subscribed on every iteration,
        //    which resets the server-side filter and wastes a round trip per poll.
        canalConnector.connect();
        canalConnector.subscribe("gmall220309.*");

        // 3. Poll forever for batches of binlog entries.
        while (true) {
            // Fetch up to 100 binlog events in one batch (auto-acked by get()).
            Message message = canalConnector.get(100);
            List<CanalEntry.Entry> entries = message.getEntries();

            // 4. Nothing new in the binlog: back off briefly before polling again.
            if (entries.isEmpty()) {
                System.out.println("没有数据，休息一会......");
                try {
                    Thread.sleep(5000);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag so the interruption is not swallowed.
                    Thread.currentThread().interrupt();
                    e.printStackTrace();
                }
            } else {
                // 5. Process each entry in the batch.
                for (CanalEntry.Entry entry : entries) {
                    // Table this entry belongs to (e.g. "order_info").
                    String tableName = entry.getHeader().getTableName();
                    CanalEntry.EntryType entryType = entry.getEntryType();

                    // Only ROWDATA entries carry actual row changes
                    // (skip TRANSACTIONBEGIN / TRANSACTIONEND markers).
                    if (entryType == CanalEntry.EntryType.ROWDATA) {
                        // 6. Deserialize the protobuf payload into a RowChange.
                        ByteString storeValue = entry.getStoreValue();
                        CanalEntry.RowChange rowChange = CanalEntry.RowChange.parseFrom(storeValue);

                        // 7. Event type (INSERT/UPDATE/DELETE) and the changed rows.
                        CanalEntry.EventType eventType = rowChange.getEventType();
                        List<CanalEntry.RowData> rowDatasList = rowChange.getRowDatasList();

                        // 8. Dispatch by table name and event type.
                        handle(tableName, eventType, rowDatasList);
                    }
                }
            }
        }
    }

    /**
     * Forwards newly inserted {@code order_info} rows to Kafka as JSON.
     * Rows from other tables or other event types are ignored.
     *
     * @param tableName   source table of the row changes
     * @param eventType   binlog event type (only INSERT is handled)
     * @param rowDatasList changed rows from one RowChange
     */
    private static void handle(String tableName, CanalEntry.EventType eventType, List<CanalEntry.RowData> rowDatasList) {
        // Only new orders are of interest here.
        if ("order_info".equals(tableName) && eventType == CanalEntry.EventType.INSERT) {
            for (CanalEntry.RowData rowData : rowDatasList) {
                // For an INSERT, the "after" columns hold the full new row.
                List<CanalEntry.Column> afterColumnsList = rowData.getAfterColumnsList();

                // Flatten the columns into a single JSON object: {columnName: value}.
                JSONObject jsonObject = new JSONObject();
                for (CanalEntry.Column column : afterColumnsList) {
                    jsonObject.put(column.getName(), column.getValue());
                }

                MyKafkaSender.send(GmallConstants.KAFKA_TOPIC_ORDER, jsonObject.toJSONString());
                System.out.println(jsonObject.toJSONString());
            }
        }
    }
}



// (Removed a commented-out, verbatim duplicate of the CanalClient implementation
//  above; the live class is the single source of truth.)
