package com.zlm.realtime.ods;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.zlm.realtime.util.MyKafkaUtils;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

/**
 * Author: Harbour
 * Date: 2021-07-06 21:23
 * Desc:
 */
/**
 * ODS-layer entry job: consumes raw log lines from the {@code ods_base_log}
 * Kafka topic, parses each line into a {@link JSONObject}, drops records that
 * are not valid JSON, and (for now) prints the parsed stream.
 */
public class BaseLogApp {

    // DWD-layer target topics for the split streams.
    // NOTE(review): currently unused in this chunk — presumably consumed by a
    // later start/page/display split step; confirm before removing.
    private static final String TOPIC_START = "dwd_start_log";
    private static final String TOPIC_PAGE = "dwd_page_log";
    private static final String TOPIC_DISPLAY = "dwd_display_log";

    public static void main(String[] args) throws Exception {
        // Must be set BEFORE any HDFS interaction (the checkpoint state backend
        // below writes to HDFS), otherwise the client may authenticate as the
        // local OS user and fail with a permission error.
        System.setProperty("HADOOP_USER_NAME", "zlm");

        // step 1: build the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.setParallelism(4); // job-wide parallelism
        env.enableCheckpointing(5000); // checkpoint interval (ms)
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE); // exactly-once semantics
        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000); // checkpoint expires after 60 s
        env.setStateBackend(new FsStateBackend("hdfs://10.219.32.55:8020/mall/flink/checkpoint"));

        // step 2: attach the Kafka source for the raw ODS log topic
        String groupId = "ods_base_log_group1";
        String topic = "ods_base_log";

        FlinkKafkaConsumer<String> kafkaSource = MyKafkaUtils.getKafkaSource(topic, groupId);
        DataStreamSource<String> kafkaDataStream = env.addSource(kafkaSource);

        // step 3: parse each line as JSON. A single malformed record must not
        // fail the task (and restart the whole job), so parse failures map to
        // null and are filtered out below — i.e. dirty records are dropped.
        SingleOutputStreamOperator<JSONObject> jsonDataStream = kafkaDataStream
                .map(new MapFunction<String, JSONObject>() {
                    @Override
                    public JSONObject map(String value) throws Exception {
                        try {
                            return JSON.parseObject(value);
                        } catch (Exception e) {
                            // Malformed log line — drop it rather than crash.
                            System.err.println("Skipping unparseable record: " + value);
                            return null;
                        }
                    }
                })
                .filter(json -> json != null);

        jsonDataStream.print();

        env.execute("ods_base_job");
    }

}
