package com.clw.dwd;


import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.clw.KafkaUtils;
import com.clw.bean.TableProcess;

import com.clw.cdc.MySQLUtil;


import com.clw.utils.DimSinkHbase;
import com.clw.utils.PhonexUtil;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.util.HashMap;
import java.util.List;

/**
 * Dynamic stream splitting: reads change records from Kafka, looks up routing
 * rules from a MySQL configuration table, and fans records out via side outputs
 * to HBase (dimension tables) or Kafka sinks.
 */
public class DynamicStream {
    public static void main(String[] args) throws Exception {

        // 1. Load the routing configuration from MySQL (table_process table).
        // Key format: "<sourceTable>-<operateType>", one OutputTag per rule.
        List<TableProcess> processList = MySQLUtil.queryList("select * from table_process", TableProcess.class, true);
        HashMap<String, TableProcess> configMap = new HashMap<>();
        for (TableProcess p : processList) {
            String key = p.getSourceTable() + "-" + p.getOperateType();
            // Anonymous subclass so the generic type parameter survives erasure.
            OutputTag<JSONObject> tag = new OutputTag<JSONObject>(key) {
            };
            p.setOutputTag(tag);
            configMap.put(key, p);

            // Constant-first equals avoids an NPE when sinkType is null.
            if ("hbase".equals(p.getSinkType())) {
                // Pre-create the HBase/Phoenix table for this dimension sink.
                PhonexUtil.checkTableByStr(p.getSinkTable(), p.getSinkColumns(), p.getSinkPk(), "");
            }
        }

        // 2. Set up the Flink execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2.1 Enable checkpointing with a filesystem state backend
        //     (alternatives: memory / fs / rocksdb).
        env.setStateBackend(new FsStateBackend("file:///gmall-flink-210325/ck11"));
        env.enableCheckpointing(5000L);
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(10000L);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000);

        // 3. Consume the raw change stream from Kafka and parse each record.
        FlinkKafkaConsumer<String> kafkaSource = KafkaUtils.createConsumer("mymaxwelltest");
        DataStreamSource<String> streamSource = env.addSource(kafkaSource);

        SingleOutputStreamOperator<JSONObject> process = streamSource.map(x -> {
            return JSON.parseObject(x);
        }).process(new ProcessFunction<JSONObject, JSONObject>() {
            @Override
            public void processElement(JSONObject jsonObject, ProcessFunction<JSONObject, JSONObject>.Context context, Collector<JSONObject> collector) throws Exception {
                String tbName = jsonObject.getString("table");
                String typeName = jsonObject.getString("type");

                TableProcess tableProcess = configMap.get(tbName + "-" + typeName);
                // Records with no matching routing rule would otherwise NPE here
                // and kill the job; pass them through on the main stream only.
                if (tableProcess != null) {
                    context.output(tableProcess.getOutputTag(), jsonObject);
                }

                collector.collect(jsonObject);
            }
        });

        // 4. For each routing rule, wire its side output to the matching sink.
        for (String s : configMap.keySet()) {

            TableProcess tableProcess = configMap.get(s);
            if ("hbase".equals(tableProcess.getSinkType())) {
                // Side stream for this rule.
                DataStream<JSONObject> outputStream = process.getSideOutput(tableProcess.getOutputTag());
                // JSON-String round trip gives each record an independent copy,
                // so enriching it below cannot mutate the main-stream object.
                SingleOutputStreamOperator<String> map1 = outputStream.map(x -> {
                    return JSON.toJSONString(x);
                });
                // Debug output of the raw side stream.
                map1.print();
                // Capture only plain Strings in the MapFunction closure: capturing
                // the whole TableProcess (which holds an anonymous OutputTag
                // subclass) forces Flink to serialize it and was the cause of the
                // "side-stream map throws" failure noted in the original code.
                final String sinkTable = tableProcess.getSinkTable();
                final String sinkPk = tableProcess.getSinkPk();
                final String operateType = tableProcess.getOperateType();
                SingleOutputStreamOperator<JSONObject> map4Sink = map1.map(new MapFunction<String, JSONObject>() {
                    @Override
                    public JSONObject map(String value) {
                        JSONObject object = JSON.parseObject(value);
                        // Enrich with routing metadata the HBase sink needs.
                        object.put("sink_table", sinkTable);
                        object.put("pk", sinkPk);
                        object.put("type", operateType);
                        return object;
                    }
                });
                map4Sink.print();
                map4Sink.addSink(new DimSinkHbase());
            } else if (TableProcess.SINK_TYPE_KAFKA.equals(tableProcess.getSinkType())) {
                DataStream<JSONObject> sideOutput = process.getSideOutput(tableProcess.getOutputTag());
                sideOutput.addSink(KafkaUtils.createProduer(tableProcess.getSinkTable()));
            }
        }

        // 5. Debug output of the main stream.
        streamSource.print();
        // 6. Launch the job.
        env.execute("FlinkCDC");
    }
}
