package yuekao2.dim;

import com.alibaba.fastjson.JSON;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import yuekao2.util.MyDeserializationSchemaFunction;
import yuekao2.util.MyHbase;

/**
 * Flink streaming job that captures MySQL change events with Flink CDC and
 * routes dimension-table records to HBase.
 *
 * <p>Flow: MySQL binlog ("gmall" database) -> CDC source (JSON strings) ->
 * per-table side outputs (sku_info, user_info, base_province) -> one HBase
 * sink per dimension stream. Records from any other table are dropped.
 */
public class ReadFilkCDC {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism keeps the binlog event order intact end-to-end.
        env.setParallelism(1);

        // 1) CDC source: stream every table of the "gmall" MySQL database as
        //    JSON strings (deserialization done by MyDeserializationSchemaFunction).
        SourceFunction<String> sourceFunction = MySQLSource.<String>builder()
                .hostname("hadoop-single")
                .port(3306)
                .databaseList("gmall") // no tableList(): monitors all tables under gmall
                .username("root")
                .password("root")
                .deserializer(new MyDeserializationSchemaFunction()) // SourceRecord -> JSON String
                .build();

        DataStreamSource<String> streamSource = env.addSource(sourceFunction);

        // 2) Side-output routing, one tag per dimension table.
        //    The anonymous subclass ({}) is required so Flink can capture the
        //    OutputTag's generic type parameter at runtime.
        OutputTag<String> skuInfoTag = new OutputTag<String>("sku_info") {};
        OutputTag<String> userInfoTag = new OutputTag<String>("user_info") {};
        OutputTag<String> baseProvinceTag = new OutputTag<String>("base_province") {};

        SingleOutputStreamOperator<String> process = streamSource.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String record, ProcessFunction<String, String>.Context ctx, Collector<String> out) throws Exception {
                // "table" names the source table of this change event.
                String table = JSON.parseObject(record).getString("table");
                // A record belongs to exactly one table, so the branches are
                // mutually exclusive — else-if avoids redundant comparisons.
                if ("sku_info".equals(table)) {
                    ctx.output(skuInfoTag, record);
                } else if ("user_info".equals(table)) {
                    ctx.output(userInfoTag, record);
                } else if ("base_province".equals(table)) {
                    ctx.output(baseProvinceTag, record);
                }
                // Non-dimension tables fall through and are intentionally dropped.
            }
        });

        // Each dimension stream is written to HBase by its own sink instance.
        process.getSideOutput(skuInfoTag).addSink(new MyHbase());
        process.getSideOutput(userInfoTag).addSink(new MyHbase());
        process.getSideOutput(baseProvinceTag).addSink(new MyHbase());

        // Name the job so it is identifiable in the Flink UI / REST API.
        env.execute("gmall-dim-cdc-to-hbase");
    }
}
