package com.bw.yk08;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import com.alibaba.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.alibaba.ververica.cdc.debezium.DebeziumSourceFunction;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.runtime.state.StateBackend;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import javax.swing.*;

/**
 * Flink job: reads raw change-log JSON from the ODS Kafka topic, drops malformed
 * records, then splits the stream by source table — "order_info" rows go to the
 * main output, "order_detail" rows to a side output — and writes each to its own
 * DWD Kafka topic.
 */
public class Test02 {
    public static void main(String[] args) throws Exception {
        // Local environment with parallelism 1 — keeps ordering deterministic for testing.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();
        env.setParallelism(1);

        // Checkpoint every 5s so Kafka offsets can be recovered after a failure.
        env.enableCheckpointing(5000L);
        // FIX: original URI was "files://my/checkpoint/dir" — "files" is not a
        // registered filesystem scheme and "my" would be parsed as a host name.
        // A local checkpoint directory needs "file://" + an absolute path.
        StateBackend stateBackend = new FsStateBackend("file:///my/checkpoint/dir");
        env.setStateBackend(stateBackend);
        // Retain externalized checkpoints on cancel so the job can be restarted from them.
        env.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        // ODS layer: raw records from Kafka (consumer group "test02").
        DataStreamSource<String> odsSourceDs =
                env.addSource(MyKafkaUtil.getFlinkKafkaConsumer("ods_base_topic", "test02"));

        odsSourceDs.print();

        // Keep only records that parse as JSON; log and drop the rest instead of
        // failing the whole job on one bad message.
        SingleOutputStreamOperator<String> odsDs = odsSourceDs.filter(
                new FilterFunction<String>() {
                    @Override
                    public boolean filter(String s) throws Exception {
                        try {
                            JSON.parseObject(s); // parse for validation only
                            return true;
                        } catch (Exception e) {
                            e.printStackTrace(); // malformed record — drop it
                            return false;
                        }
                    }
                }
        );

        // Side-output tag for order_detail rows. The anonymous subclass ({})
        // preserves the generic type argument for Flink's type extraction.
        OutputTag<String> orderDetailTag = new OutputTag<String>("order_detail") {};

        // Split the stream by the "table" field of each change-log record.
        SingleOutputStreamOperator<String> orderDS = odsDs.process(
                new ProcessFunction<String, String>() {
                    @Override
                    public void processElement(String s,
                                               ProcessFunction<String, String>.Context context,
                                               Collector<String> collector) throws Exception {
                        JSONObject ob = JSON.parseObject(s);
                        if (ob != null) {
                            String table = ob.getString("table");
                            // FIX: constant-first equals — a record without a
                            // "table" field no longer throws an NPE.
                            JSONObject data = ob.getJSONObject("data");
                            if (data == null) {
                                return; // nothing to forward for this record
                            }
                            if ("order_info".equals(table)) {
                                collector.collect(data.toJSONString());
                            } else if ("order_detail".equals(table)) {
                                context.output(orderDetailTag, data.toJSONString());
                            }
                        }
                    }
                }
        );

        // DWD layer: each split stream goes to its own Kafka topic.
        orderDS.addSink(MyKafkaUtil.getFlinkKafkaProducer("dwd_order_info_topic"));
        orderDS.getSideOutput(orderDetailTag)
                .addSink(MyKafkaUtil.getFlinkKafkaProducer("dwd_order_detail_topic"));

        env.execute();
    }
}
