package net.bwie.realtime.jtp.dim.job;

import net.bwie.realtime.jtp.dim.function.DimDataFilterFunction;
import net.bwie.realtime.jtp.dim.function.DimHbaseSinkFunction;
import net.bwie.realtime.jtp.dim.function.HbaseDimMapFunction;
import net.bwie.realtime.jtp.common.utils.KafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;

/**
 * Flink streaming job that consumes change records from the {@code jtp_mall_topic}
 * Kafka topic, keeps only dimension-table records, enriches them with their HBase
 * row-key column, and writes the result to HBase via {@link DimHbaseSinkFunction}.
 *
 * <p>Runs with parallelism 1; the job name is {@code RealtimeAsyncDataDimJob}.
 */
public class RealtimeAsyncDataDimJob {

    /**
     * Job entry point: builds the Kafka source, the dimension-handling pipeline,
     * and the HBase sink, then submits the job for execution.
     *
     * @param args unused command-line arguments
     * @throws Exception if job submission or execution fails
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        DataStream<String> kafkaStream = KafkaUtil.consumerKafka(env, "jtp_mall_topic");

        DataStream<String> hbaseStream = handle(kafkaStream);

        hbaseStream.addSink(new DimHbaseSinkFunction());

        env.execute("RealtimeAsyncDataDimJob");
    }

    /**
     * Filters the raw Kafka stream down to dimension-table records and maps each
     * record to the string form expected by the HBase sink.
     *
     * @param kafkaStream raw JSON change records consumed from Kafka
     * @return the stream of dimension records ready for the HBase sink
     */
    private static DataStream<String> handle(DataStream<String> kafkaStream) {
        // Dimension table name -> primary-key column used as the HBase row key.
        // Built as a plain HashMap (not double-brace init): Flink serializes the
        // functions that capture this map, and an anonymous HashMap subclass is
        // a serialization hazard.
        HashMap<String, String> dimMap = new HashMap<>();
        dimMap.put("base_dic", "dic_code");
        dimMap.put("base_province", "id");
        dimMap.put("base_region", "id");
        dimMap.put("base_category1", "id");
        dimMap.put("base_category2", "id");
        dimMap.put("base_category3", "id");
        dimMap.put("sku_info", "id");
        dimMap.put("spu_info", "id");
        dimMap.put("user_info", "id");
        dimMap.put("activity_info", "id");
        dimMap.put("activity_rule", "id");
        dimMap.put("coupon_info", "id");
        dimMap.put("base_trademark", "id");

        // Copy into a serializable HashSet: HashMap.keySet() is a non-serializable
        // view and cannot be shipped to Flink task managers directly.
        Set<String> dimSet = new HashSet<>(dimMap.keySet());

        DataStream<String> dimStream = kafkaStream.filter(new DimDataFilterFunction(dimSet));

        return dimStream.map(new HbaseDimMapFunction(dimMap));
    }
}
