package yuekao5.ods;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.util.Collector;
import yuekao5.util.KafkaUtil;
import yuekao5.util.MyDeserializationSchemaFunction;

/**
 * ODS-layer ingestion job: uses Flink CDC to capture MySQL binlog changes from
 * the {@code yuekao05} database, filters out dirty (non-JSON) records, and
 * writes the valid change events as JSON strings to the Kafka topic
 * {@code ods-tms-topic}.
 *
 * <p>Submit on YARN with, e.g.:
 * <pre>
 * ./bin/flink run -t yarn-per-job --detached \
 *     --class yuekao5.ods.ReadFilkCDC /root/yuekaolx_5/yuekaolx-1.0-SNAPSHOT.jar
 * </pre>
 */
public class ReadFilkCDC {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 so the Kafka sink preserves the binlog event ordering.
        env.setParallelism(1);

        // CDC source: monitors every table under the yuekao05 database.
        // MyDeserializationSchemaFunction converts Debezium SourceRecords to JSON strings.
        SourceFunction<String> sourceFunction = MySQLSource.<String>builder()
                .hostname("hadoop-single")
                .port(3306)
                .databaseList("yuekao05")
//                .tableList("yuekao05.user_info") // uncomment to restrict to a single table
                .username("root")
                .password("root")
                .deserializer(new MyDeserializationSchemaFunction())
                .build();

        DataStreamSource<String> streamSource = env.addSource(sourceFunction);

        // Filter dirty data: keep only records that parse as valid JSON.
        // Invalid records are logged (not silently swallowed) and dropped.
        SingleOutputStreamOperator<JSONObject> process = streamSource.process(new ProcessFunction<String, JSONObject>() {
            @Override
            public void processElement(String s, ProcessFunction<String, JSONObject>.Context context, Collector<JSONObject> collector) throws Exception {
                try {
                    JSONObject jsonObject = JSON.parseObject(s);
                    collector.collect(jsonObject);
                } catch (Exception e) {
                    // Dirty record: deliberately dropped, but logged so data loss is visible.
                    System.err.println("Dropping dirty record (invalid JSON): " + s + " — " + e.getMessage());
                }
            }
        });

        // Serialize back to JSON strings and sink to the ODS Kafka topic.
        process.map(JSON::toJSONString).addSink(KafkaUtil.kafkaSink("ods-tms-topic"));

        env.execute();
    }
}
