package net.bw.realtime.jtp.dim.trade.job;

import net.bw.realtime.jtp.common.utils.KafkaUtil;
import net.bw.realtime.jtp.dim.trade.function.DimDataFilterFunction;
import net.bw.realtime.jtp.dim.trade.function.DimHBaseMapFunction;
import net.bw.realtime.jtp.dim.trade.sink.DimSinkHBaseFunction;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.HashMap;
import java.util.HashSet;

/*
 * @author liuyawei
 * @date 2025-06-02
 */
/**
 * Flink job that consumes change records from Kafka, keeps only the rows
 * belonging to known dimension tables, enriches them with their HBase
 * row-key column, and writes them into HBase.
 */
public class RealtimeAsyncDataDimJob {

    public static void main(String[] args) throws Exception {

        // 1. Create the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallel task — dimension data volume is small and ordering is simpler to reason about.
        env.setParallelism(1);

        // 2. Read raw change records from the Kafka topic.
        DataStream<String> kafkaStream = KafkaUtil.consumerKafka(env, "topic-mall");

        // 3. Filter down to dimension-table records and map them into HBase-ready form.
        DataStream<String> handledStream = handle(kafkaStream);

        // 4. Persist the dimension records into HBase.
        handledStream.addSink(new DimSinkHBaseFunction());

        // Launch the job.
        env.execute("RealtimeAsyncDataDimJob");
    }

    /**
     * Filters the Kafka stream down to known dimension tables and maps each
     * record into the form expected by the HBase sink.
     *
     * @param kafkaStream raw change records read from Kafka
     * @return stream of enriched dimension records ready for the HBase sink
     */
    private static DataStream<String> handle(DataStream<String> kafkaStream) {

        // Dimension table name -> row-key column. Built with plain puts rather than
        // double-brace initialization, which would create an anonymous HashMap
        // subclass (a serialization/memory anti-pattern, especially when Flink
        // ships this map to the operator instances). Concrete HashMap is kept
        // because Flink function state must be serializable.
        HashMap<String, String> tableToKeyColumn = new HashMap<>();
        tableToKeyColumn.put("base_dic", "dic_code");
        tableToKeyColumn.put("base_province", "id");
        tableToKeyColumn.put("base_region", "id");
        tableToKeyColumn.put("base_category1", "id");
        tableToKeyColumn.put("base_category2", "id");
        tableToKeyColumn.put("base_category3", "id");
        tableToKeyColumn.put("sku_info", "id");
        tableToKeyColumn.put("spu_info", "id");
        tableToKeyColumn.put("user_info", "id");
        tableToKeyColumn.put("base_trademark", "id");
        tableToKeyColumn.put("activity_info", "id");
        tableToKeyColumn.put("activity_rule", "id");
        tableToKeyColumn.put("coupon_info", "id");

        // Names of all dimension tables we keep.
        HashSet<String> dimTableNames = new HashSet<>(tableToKeyColumn.keySet());

        // Drop every record that does not belong to a dimension table.
        SingleOutputStreamOperator<String> asyncStream =
                kafkaStream.filter(new DimDataFilterFunction(dimTableNames));

        // Attach the HBase row-key column info to each record.
        SingleOutputStreamOperator<String> hbaseStream =
                asyncStream.map(new DimHBaseMapFunction(tableToKeyColumn));

        return hbaseStream;
    }

}
