package yuekao5.dim;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import yuekao5.util.KafkaUtil;
import yuekao5.util.MyHbase;

/**
 * DIM-layer job: consumes ODS logistics records from Kafka, keeps only the rows
 * belonging to the four core dimension tables (user_info, base_dic,
 * base_region_info, express_courier), and sinks them to HBase
 * (table prefix: dim_, column family: info).
 */
public class StorageHbase {

    /** Exact names of the dimension tables to keep. */
    private static final Set<String> DIM_TABLES = new HashSet<>(Arrays.asList(
            "user_info", "base_dic", "base_region_info", "express_courier"));

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Consume ODS-layer logistics business data from Kafka.
        DataStreamSource<String> streamSource = env.addSource(KafkaUtil.kafkaSource("ods-tms-topic"));

        // Parse each JSON record and forward only rows from the core dimension tables.
        SingleOutputStreamOperator<String> process = streamSource.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String s, ProcessFunction<String, String>.Context context, Collector<String> collector) throws Exception {
                // Guard against malformed JSON / missing "table" so one bad record
                // cannot fail the whole job.
                JSONObject record;
                try {
                    record = JSON.parseObject(s);
                } catch (Exception ignored) {
                    return; // skip unparseable records
                }
                if (record == null) {
                    return;
                }
                String table = record.getString("table");
                // Exact set membership: the original substring check
                // ("a,b,c".contains(table)) wrongly accepted partial names
                // such as "user" or "info".
                if (table != null && DIM_TABLES.contains(table)) {
                    collector.collect(s);
                }
            }
        });

        // Store the filtered dimension data into HBase (prefix dim_, family info).
        process.addSink(new MyHbase());

        env.execute();
    }
}
