package com.atguigu.edu.realtime.app.dwd.log;

import com.alibaba.fastjson.JSONAware;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.common.kafkaTopics;
import com.atguigu.edu.realtime.util.KafkaUtil;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/**
 * DWD traffic page-log trimming job.
 *
 * <p>Reads raw page-log JSON from the {@code dwd_traffic_page_log} Kafka topic,
 * extracts a fixed subset of fields (session id, source channel, is_new flag,
 * page id, dwell time, timestamp) and writes the slimmed records to the
 * {@code DWD_TRAFFIC_PAGE_OPT} Kafka topic.
 */
public class DwdTrfficPageOpt {
    public static void main(String[] args) throws Exception {
        // TODO 1. Obtain the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);


        /*// TODO 2. Checkpointing and state backend (disabled for local testing).
        env.enableCheckpointing(5*60*1000L, CheckpointingMode.EXACTLY_ONCE);
        // Checkpoint timeout.
        env.getCheckpointConfig().setCheckpointTimeout(3*60*1000L);
        // Maximum number of concurrent checkpoints.
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        // Checkpoint storage location.
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME","atguigu");
*/
        // State backend (disabled for local testing).
        //        env.setStateBackend(new HashMapStateBackend());
        // TODO 3. Read the page-log stream from Kafka.
        String topicName = "dwd_traffic_page_log";
        String groupId = "dwd_trffic_page_opt_TEST";
        DataStreamSource<String> kafkaStream = env.addSource(KafkaUtil.getKafkaConsumer(topicName, groupId));
        // TODO 4. Parse each record and keep only the fields needed downstream.
        // flatMap (rather than map) so that malformed records can be dropped
        // instead of emitting a placeholder element.
        SingleOutputStreamOperator<JSONObject> flatMapStream = kafkaStream.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                try {
                    JSONObject jsonObject = JSONObject.parseObject(value);
                    // Either sub-object may be absent; getJSONObject then returns
                    // null and the field access below throws, routing the record
                    // to the catch block where it is discarded.
                    JSONObject common = jsonObject.getJSONObject("common");
                    JSONObject page = jsonObject.getJSONObject("page");
                    JSONObject outJson = new JSONObject();
                    outJson.put("session_id", common.getString("sid"));
                    outJson.put("source", common.getString("sc"));
                    outJson.put("is_new", common.getString("is_new"));
                    outJson.put("pageid", page.getString("page_id"));
                    outJson.put("during_time", page.getLong("during_time"));
                    outJson.put("ts", jsonObject.getLong("ts"));
                    // Collect only on success: the original emitted an empty {}
                    // for every malformed record, polluting the downstream topic.
                    out.collect(outJson);
                } catch (Exception e) {
                    // Log the offending record and drop it; do not propagate,
                    // or a single dirty record would fail the whole job.
                    System.err.println("Dropping malformed page log record: " + value);
                    e.printStackTrace();
                }
            }
        });
        flatMapStream.print("flatmap");

        // TODO 5. Serialize and write the trimmed records back to Kafka.
        flatMapStream.map(JSONAware::toJSONString).addSink(KafkaUtil.getKafkaProducer(kafkaTopics.DWD_TRAFFIC_PAGE_OPT));

        // TODO Execute the job.
        env.execute(groupId);
    }
}
