package com.zyx.flink.realtime.app;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.zyx.flink.realtime.func.DimAsyncFunction;
import com.zyx.flink.realtime.pojo.UserInfo;
import com.zyx.flink.realtime.utils.ClickHouseUtil;
import com.zyx.flink.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.concurrent.TimeUnit;


/**
 * Flink job: reads user events from Kafka, enriches them with dimension data
 * via an asynchronous lookup, buffers the enriched records in Kafka, and
 * finally loads them into ClickHouse.
 *
 * @author zyx
 * @since 2021/6/11 20:21
 */
public class KafkaToClickHouseApp {

    /** Kafka topic carrying the raw user events. */
    private static final String SOURCE_TOPIC = "user_age";
    /** Intermediate Kafka topic buffering enriched records before the ClickHouse load. */
    private static final String ENRICHED_TOPIC = "user_info";
    /** Dimension table name used by the async lookup. */
    private static final String DIM_TABLE = "user_info";
    /** Timeout for each async dimension lookup, in seconds. */
    private static final long ASYNC_TIMEOUT_SECONDS = 60L;

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = buildEnvironment();

        // 1) Consume raw user events and deserialize each JSON string into a POJO.
        SingleOutputStreamOperator<UserInfo> userBasicDs = env
                .addSource(MyKafkaUtil.getKafkaSource(SOURCE_TOPIC, "user_age_consumer01"))
                .map(json -> JSON.parseObject(json, UserInfo.class));

        // 2) Enrich each record with its dimension row via an async lookup.
        //    unorderedWait: results may be emitted out of order, acceptable here.
        SingleOutputStreamOperator<UserInfo> userInfoFullDs = AsyncDataStream.unorderedWait(
                userBasicDs,
                new DimAsyncFunction<UserInfo>(DIM_TABLE) {
                    @Override
                    public String getKey(UserInfo userInfo) {
                        // Dimension lookup key is the user's id.
                        return userInfo.getId();
                    }

                    @Override
                    public void join(UserInfo userInfo, JSONObject dimInfoJsonObj) {
                        // Copy the dimension attribute onto the fact record.
                        userInfo.setName(dimInfoJsonObj.getString("NAME"));
                        // NOTE(review): debug output — replace with an SLF4J logger
                        // before running in production.
                        System.out.println("+++++++++++++关联成功, 关联后数据为:" + userInfo);
                    }
                },
                ASYNC_TIMEOUT_SECONDS,
                TimeUnit.SECONDS
        );

        // 3) The async result is not written to ClickHouse directly; it is first
        //    buffered in Kafka so downstream jobs can consume and aggregate it.
        userInfoFullDs
                .map(JSON::toJSONString)
                .addSink(MyKafkaUtil.getKafkaSink(ENRICHED_TOPIC));

        // 4) A downstream consumer would normally transform/aggregate (Flink SQL
        //    or the DataStream API) before loading ClickHouse; for this demo the
        //    enriched topic is loaded directly.
        String insertSql = "insert into user_info values(?,?,?)";
        env
                .addSource(MyKafkaUtil.getKafkaSource(ENRICHED_TOPIC, "user_info_consumer01"))
                .map(json -> JSON.parseObject(json, UserInfo.class))
                .addSink(ClickHouseUtil.getJdbcSink(insertSql));

        env.execute("kafka_to_clickhouse");
    }

    /**
     * Builds the execution environment with checkpointing enabled:
     * 10s checkpoint interval, 30s checkpoint timeout, HashMap state backend
     * with HDFS checkpoint storage, and checkpoints retained on cancellation
     * so the job can be restored manually after a cancel.
     */
    private static StreamExecutionEnvironment buildEnvironment() {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);

        env.enableCheckpointing(10 * 1000L);
        env.setStateBackend(new HashMapStateBackend());

        CheckpointConfig ckConfig = env.getCheckpointConfig();
        ckConfig.setCheckpointTimeout(30 * 1000L);
        ckConfig.setCheckpointStorage("hdfs://ns01/test/");
        ckConfig.enableExternalizedCheckpoints(CheckpointConfig
                .ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        return env;
    }
}
