package yuekao3.dim;

import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import yuekao3.util.MyDeserializationSchemaFunction;
import yuekao3.util.MyHbase;

/**
 * DIM-layer Flink job: captures change events (CDC) from the MySQL business
 * dimension tables {@code user_info}, {@code sku_info} and
 * {@code base_province} and writes them into HBase.
 *
 * <p>The pipeline is: MySQL binlog source → print (debug) → HBase sink.
 * Records are deserialized to JSON strings by
 * {@link MyDeserializationSchemaFunction} and persisted by {@link MyHbase}.
 */
public class StorageHbase {

    /**
     * Builds and runs the CDC-to-HBase streaming job. Blocks until the job
     * terminates.
     *
     * @param args unused command-line arguments
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the sink single-threaded so change events for a
        // given row are applied to HBase in binlog order.
        env.setParallelism(1);

        // CDC source over the three dimension tables in database yuekao01.
        // databaseList scopes the binlog reader to that database; tableList
        // further restricts capture to the listed tables only.
        SourceFunction<String> cdcSource = MySQLSource.<String>builder()
                .hostname("hadoop-single")
                .port(3306)
                .databaseList("yuekao01")
                .tableList("yuekao01.user_info,yuekao01.sku_info,yuekao01.base_province")
                .username("root")
                .password("root")
                // Converts each Debezium SourceRecord into a String payload.
                .deserializer(new MyDeserializationSchemaFunction())
                .build();

        DataStreamSource<String> cdcStream = env.addSource(cdcSource);
        cdcStream.print(); // debug output of every change event
        cdcStream.addSink(new MyHbase());

        env.execute();
    }
}
