package net.bwie.realtime.jtp.dim.job;

import net.bwie.realtime.jtp.common.utils.KafkaUtil;
import net.bwie.realtime.jtp.dim.Function.DimDataFilterFunction;
import net.bwie.realtime.jtp.dim.Function.DimHbaseSinkFunction;
import net.bwie.realtime.jtp.dim.Function.HbaseDimMapFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.*;


/**
 * Flink job that maintains dimension tables in HBase.
 *
 * <p>Pipeline: consume change-log records from the Kafka topic {@code topic-db},
 * keep only records belonging to known dimension tables, attach the primary-key
 * field name for each table, and sink the result into HBase (column family
 * {@code info}).
 */
public class RealtimeAsyncDataDimJob {

    public static void main(String[] args) throws Exception {
        // 1. Create the execution environment. Parallelism 1 keeps ordering
        //    simple for dimension upserts; raise it only if the sink tolerates
        //    out-of-order writes per key.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // 2. Source: consume the raw database change-log topic.
        DataStream<String> kafkaDataStream = KafkaUtil.consumerKafka(env, "topic-db");

        // 3. Transformation: filter down to dimension-table records and
        //    prepare them for the HBase layout.
        DataStream<String> dimDataStream = handle(kafkaDataStream);

        // 4. Sink: write dimension records into HBase.
        dimDataStream.addSink(new DimHbaseSinkFunction());

        // 5. Trigger job execution.
        env.execute("RealtimeAsyncDataDimJob");
    }

    /**
     * Filters the raw Kafka stream by dimension-table name and maps each record
     * to the HBase form (column family is uniformly {@code info}; the row-key
     * field per table comes from the table-to-primary-key mapping).
     *
     * @param kafkaDataStream raw JSON change-log records from Kafka
     * @return stream of dimension records ready for the HBase sink
     */
    private static DataStream<String> handle(DataStream<String> kafkaDataStream) {
        // Mapping: dimension table name -> primary-key column (HBase row key).
        HashMap<String, String> dimMap = buildDimTablePkMap();

        // Copy the key set into a standalone HashSet: Flink serializes the
        // filter function, and Map.keySet() views are not serializable.
        Set<String> dimSet = new HashSet<>(dimMap.keySet());

        // Keep only records that belong to one of the dimension tables.
        SingleOutputStreamOperator<String> dimStream =
                kafkaDataStream.filter(new DimDataFilterFunction(dimSet));

        // Attach the primary-key field name so the sink knows the row key.
        return dimStream.map(new HbaseDimMapFunction(dimMap));
    }

    /**
     * Builds the dimension-table to primary-key mapping as a plain
     * {@link HashMap}. Deliberately avoids double-brace initialization, which
     * creates an anonymous HashMap subclass and complicates Flink
     * serialization.
     */
    private static HashMap<String, String> buildDimTablePkMap() {
        HashMap<String, String> dimMap = new HashMap<>();
        dimMap.put("base_dic", "dic_code");
        dimMap.put("base_province", "id");
        dimMap.put("base_region", "id");
        dimMap.put("base_category1", "id");
        dimMap.put("base_category2", "id");
        dimMap.put("base_category3", "id");
        dimMap.put("sku_info", "id");
        dimMap.put("spu_info", "id");
        dimMap.put("user_info", "id");
        dimMap.put("activity_info", "id");
        dimMap.put("activity_rule", "id");
        dimMap.put("coupon_info", "id");
        dimMap.put("base_trademark", "id");
        return dimMap;
    }
}
