package com.cdc;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
import com.function.CustomerDeserialization;
import com.utils.KafkaUtils;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * Uses Flink CDC to sync change data from the MySQL order and user-info tables
 * to the ODS-layer Kafka topics in real time, while filtering out unusable
 * dirty data (e.g. records with invalid numeric fields). (5 points)
 */
public class ReadMysql {
    public static void main(String[] args) throws Exception {
        //流环境
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        //利用cdc全部读取mysql数据
        DebeziumSourceFunction<String> mysql = MySQLSource.<String>builder()
                .hostname("hadoop-single")
                .port(3306)
                .databaseList("gmall2021") // set captured database
                .tableList("gmall2021.*") // set captured table
                .username("root")
                .password("root")
                .deserializer(new CustomerDeserialization()) // converts SourceRecord to JSON String
                .build();
        DataStreamSource<String> stream = env.addSource(mysql);
        //查看数据
        //分流 脏数据分到测流上
        OutputTag<String> errorTag = new OutputTag<String>("error") {
        };
        SingleOutputStreamOperator<JSONObject> mainDs = stream.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String str, ProcessFunction<String, JSONObject>.Context context, Collector<JSONObject> collector) throws Exception {
                //进行分流
                try {
                    JSONObject jsonObject = JSON.parseObject(str);
                    String type = jsonObject.getString("type");
                    if(!"delete".equals(type)){
                        collector.collect(jsonObject);
                    }
                } catch (Exception e) {
                    context.output(errorTag, str);
                }
            }
        });

        mainDs.print("one>>");

        mainDs.map(x->{
            return x.toString();
        }).addSink(KafkaUtils.createProduer("ods_base_db"));
        //错误数据发送到错误的kafka
//        DataStream<String> errorDs = mainDs.getSideOutput(errorTag);
//        errorDs.addSink(KafkaUtils.createProduer("error"));
        //对主流mainDs进行分流,分出三张表order_info  order_detail  user_info
//        OutputTag<JSONObject> order_infoTag = new OutputTag<JSONObject>("order_info") {
//        };
//        OutputTag<JSONObject> order_detailTag = new OutputTag<JSONObject>("order_detail") {
//        };
//        OutputTag<JSONObject> user_infoTag = new OutputTag<JSONObject>("user_info") {
//        };
//
//        mainDs.print();
//
//        SingleOutputStreamOperator<JSONObject> tableDs = mainDs.process(new ProcessFunction<JSONObject, JSONObject>() {
//            @Override
//            public void processElement(JSONObject jsonObject, ProcessFunction<JSONObject, JSONObject>.Context context, Collector<JSONObject> collector) throws Exception {
//                //开始分流
//                String tableName = jsonObject.getString("tableName");
//                if ("order_info".equals(tableName)) {
//                    context.output(order_infoTag, jsonObject);
//                } else if ("order_detail".equals(tableName)) {
//                    context.output(order_detailTag, jsonObject);
//                } else if ("user_info".equals(tableName)) {
//                    context.output(user_infoTag, jsonObject);
//                } else {
//                    collector.collect(jsonObject);
//                }
//            }
//        });
//
//        DataStream<JSONObject> orderInfoDs = tableDs.getSideOutput(order_infoTag);
//        DataStream<JSONObject> orderDetailDs = tableDs.getSideOutput(order_detailTag);
//        DataStream<JSONObject> userInfoDs = tableDs.getSideOutput(user_infoTag);
//
//        orderInfoDs.addSink(KafkaUtils.createProduer("ods_orderInfo_db"));
//        orderDetailDs.addSink(KafkaUtils.createProduer("ods_orderDetail_db"));
//        userInfoDs.addSink(KafkaUtils.createProduer("ods_userInfo_db"));

        env.execute();

    }
}
