package com.example.bootredis.flink;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.ververica.cdc.connectors.mysql.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.DebeziumSourceFunction;
import org.apache.commons.lang.ArrayUtils;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.scala.OutputTag;
import org.apache.flink.util.Collector;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;
import org.apache.flink.configuration.Configuration;
/**
 * Uses flink-cdc to listen for MySQL binlog changes and synchronize data.
 * Events from different tables can be routed to different sinks.
 * Supports several startup modes: full snapshot, latest offset, or earliest binlog.
 * Can also resume from a previously saved local checkpoint configuration.
 *
 * @author Solming
 * @since 2022/8/22
 **/
public class StartUp {
    /**
     * Entry point: builds and runs a Flink job that listens to MySQL binlog
     * changes via flink-cdc, routes change events by table name into side
     * outputs, and delivers the "article" table's stream to a custom sink.
     *
     * @param args command-line arguments (unused)
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {
        /*
        To resume from a previously saved checkpoint, build the environment from
        a Configuration pointing at the checkpoint path instead:

        Configuration config = new Configuration();
        config.setString("execution.savepoint.path","D:\\yp\\tmp\\checkpoints\\263d45ec6d959ecbebb36dc33257f7e3\\chk-68");
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(config);
        */
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // NOTE(review): connection credentials are hard-coded; externalize
        // host/port/username/password before deploying anywhere real.
        DebeziumSourceFunction<String> sourceFunction = MySqlSource.<String>builder()
                .hostname("118.25.198.145")
                .port(13306)
                .username("root")
                .password("root")
                .databaseList("monomer_order")
                .tableList("monomer_order.article") // comma-separated for multiple tables
                .deserializer(new LinkConfig())     // custom deserializer producing JSON strings
//                .startupOptions(StartupOptions.latest())  // start from the latest binlog offset
                .startupOptions(StartupOptions.initial())   // full snapshot, then incremental changes
                .serverTimeZone("UTC")
                .build();

        DataStreamSource<String> streamSource = env.addSource(sourceFunction);

        // One side-output tag per table so multiple tables can be routed independently.
        OutputTag<String> orderTag = new OutputTag<>("article", Types.STRING);
//        OutputTag<String> userTag = new OutputTag<>("表2", Types.STRING);

        // Checkpoint every 3s to a local filesystem backend so the job can resume.
        env.setStateBackend(new FsStateBackend("file:///D:\\yp\\tmp\\checkpoints"));
        env.enableCheckpointing(3000);

        // Parse each CDC event into JSON, then route it to the side output
        // matching its "tableName" field (set by the LinkConfig deserializer).
        SingleOutputStreamOperator<String> process = streamSource
                .map((MapFunction<String, JSONObject>) JSON::parseObject)
                .process(new ProcessFunction<JSONObject, String>() {
                    @Override
                    public void processElement(JSONObject value, Context context, Collector<String> collector) {
                        if ("article".equals(value.getString("tableName"))) {
                            context.output(orderTag, value.toJSONString());
                        } else if ("表2".equals(value.getString("tableName"))) {
//                            context.output(userTag, value.toJSONString());
                        }
                    }
                });

        DataStream<String> orderStream = process.getSideOutput(orderTag);
//        DataStream<String> userStream = process.getSideOutput(userTag);
        orderStream.print();
//        userStream.print();

        // Fix: attach the custom sink to the routed "article" side-output stream
        // rather than the raw source, so the sink only receives events that passed
        // table routing (previously the sink consumed every table's raw events,
        // bypassing the side-output routing entirely).
        orderStream.addSink(new ListenerOrderSink());
        env.execute("fLinkCDC");
    }
}

