package com.zhang.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.zhang.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

/**
 * Key job: dynamic routing of business (DB) data.
 *
 * @title: Dynamic splitting of business data
 * @author: zhang
 * @date: 2022/3/3 09:28
 * Fact data: routed to different Kafka topics
 * Dimension data: routed to different HBase tables (via Phoenix)
 */
public class BaseDBApp_kafka_test {
    /**
     * Entry point: consumes ODS business-data records from Kafka, parses each record
     * as JSON, filters out dirty/incomplete records, and prints the surviving stream.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        //todo 1. Obtain the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        //todo 2. Configure checkpointing (not yet implemented)

        //todo 3. Read business data from Kafka
        //3.1 Declare the source topic and consumer group
        String topic = "ods_base_db_m_2022";
        String groupId = "base_db_app";
        //3.2 Create the consumer source and wrap it into a stream
        DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtil.getKafkaSource(topic, groupId));

        //todo 4. Convert raw strings into JSONObjects.
        // Guard against malformed JSON first: a bare map(JSON::parseObject) would throw on
        // the first corrupt Kafka record and fail the entire job. Drop unparseable records
        // instead of crashing.
        SingleOutputStreamOperator<String> parseableDS = kafkaDS.filter(
                new FilterFunction<String>() {
                    @Override
                    public boolean filter(String value) {
                        try {
                            return JSON.parseObject(value) != null;
                        } catch (Exception ignored) {
                            // Malformed JSON — drop the record rather than kill the job.
                            return false;
                        }
                    }
                }
        );
        SingleOutputStreamOperator<JSONObject> jsonObjDS = parseableDS.map(JSON::parseObject);

        //todo 5. ETL: keep only records that carry a non-empty "table" name and a
        // non-trivial "data" payload.
        SingleOutputStreamOperator<JSONObject> filterDS = jsonObjDS.filter(
                new FilterFunction<JSONObject>() {
                    @Override
                    public boolean filter(JSONObject value) throws Exception {
                        return value.getString("table") != null
                                && value.getString("table").length() > 0
                                && value.getJSONObject("data") != null
                                // a "data" string of <= 3 chars (e.g. "{}") holds no fields
                                && value.getString("data").length() > 3;
                    }
                }
        );

        filterDS.print();

        //todo 11. Execute the job
        env.execute("BaseDBApp");
    }
}
