package com.bw.dwd;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.bw.MyKafkaUtil;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.util.Properties;

/**
 * DWD-layer log splitting job.
 *
 * <p>Pipeline: read raw log JSON from Kafka topic {@code topic_log_7} →
 * drop malformed records → correct the {@code is_new} flag per device id
 * using keyed state → split the stream into side outputs (start / err /
 * display / action / video) and a main page stream → write the page
 * stream to Kafka topic {@code dwd_page_yk7}.
 */
public class Test1 {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism keeps local runs deterministic and easy to debug.
        env.setParallelism(1);

        // Checkpointing: local-filesystem state backend for development.
        // Switch to an HDFS path (e.g. hdfs://hadoop102:8020/data/ck/fs) in production.
        FsStateBackend stateBackend = new FsStateBackend("file:///D:\\javaProject\\FlinkMonth2203A\\ck");
        env.setStateBackend(stateBackend);
        env.enableCheckpointing(5000); // checkpoint every 5 s

        // Source: raw log lines from Kafka. The random suffix on the group id
        // forces a fresh consumer group on every run so the configured offset
        // policy is re-applied; MyKafkaUtil supplies the remaining consumer config.
        DataStream<String> stream = env.addSource(
                MyKafkaUtil.getKafkaConsumer("topic_log_7", "test" + System.currentTimeMillis()));

        // ETL: parse each line as JSON; malformed records are deliberately
        // dropped (best-effort filter) — parse failures are only printed.
        SingleOutputStreamOperator<JSONObject> etlStream = stream.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String s, Collector<JSONObject> collector) throws Exception {
                try {
                    JSONObject jsonObject = JSON.parseObject(s);
                    collector.collect(jsonObject);
                } catch (Exception e) {
                    // Malformed log line: skip it, keep the job running.
                    e.printStackTrace();
                }
            }
        });

        // Validate the "new user" flag: key by device id (common.mid) and use
        // keyed state to detect devices we have already seen.
        SingleOutputStreamOperator<JSONObject> isNewStream = etlStream.keyBy(new KeySelector<JSONObject, String>() {
            @Override
            public String getKey(JSONObject jsonObject) throws Exception {
                return jsonObject.getJSONObject("common").getString("mid");
            }
        }).process(new KeyedProcessFunction<String, JSONObject, JSONObject>() {
            // Non-null once this device id has been seen claiming is_new = "1".
            private ValueState<String> valueState;

            @Override
            public void open(Configuration parameters) throws Exception {
                valueState = getRuntimeContext().getState(new ValueStateDescriptor<String>("value_state", String.class));
            }

            @Override
            public void processElement(JSONObject jsonObject, KeyedProcessFunction<String, JSONObject, JSONObject>.Context context, Collector<JSONObject> collector) throws Exception {
                String value = valueState.value();
                JSONObject common = jsonObject.getJSONObject("common");
                String isNew = common.getString("is_new");
                if ("1".equals(isNew)) {
                    if (value != null) {
                        // Device was seen before: the "new user" claim is false.
                        // Use the String "0" so the field type stays consistent
                        // with the incoming "1"/"0" string values.
                        common.put("is_new", "0");
                    } else {
                        // First sighting: remember this device id.
                        valueState.update("1");
                    }
                }
                collector.collect(jsonObject);
            }
        });

        // Side-output tags for the per-record-type split below.
        OutputTag<String> startTag = new OutputTag<String>("start", TypeInformation.of(String.class));
        OutputTag<String> errorTag = new OutputTag<String>("err", TypeInformation.of(String.class));
        OutputTag<String> displayTag = new OutputTag<String>("display", TypeInformation.of(String.class));
        OutputTag<String> actionTag = new OutputTag<String>("action", TypeInformation.of(String.class));
        OutputTag<String> videoTag = new OutputTag<String>("video", TypeInformation.of(String.class));

        // Split by top-level key. A single record may match several keys and is
        // then emitted to every matching output; "page" records go to the main stream.
        SingleOutputStreamOperator<JSONObject> splitStream = isNewStream.process(new ProcessFunction<JSONObject, JSONObject>() {
            @Override
            public void processElement(JSONObject jsonObject, ProcessFunction<JSONObject, JSONObject>.Context context, Collector<JSONObject> collector) throws Exception {
                if (jsonObject.containsKey("start")) {
                    context.output(startTag, jsonObject.toJSONString());
                }
                if (jsonObject.containsKey("err")) {
                    context.output(errorTag, jsonObject.toJSONString());
                }
                if (jsonObject.containsKey("displays")) {
                    context.output(displayTag, jsonObject.toJSONString());
                }
                if (jsonObject.containsKey("actions")) {
                    context.output(actionTag, jsonObject.toJSONString());
                }
                if (jsonObject.containsKey("appVideo")) {
                    context.output(videoTag, jsonObject.toJSONString());
                }
                if (jsonObject.containsKey("page")) {
                    collector.collect(jsonObject);
                }
            }
        });

        // Extract the side-output streams. Their Kafka sinks are not wired up
        // yet (e.g. MyKafkaUtil.getKafkaProducer("start_topic_yk7") etc.).
        DataStream<String> startStream = splitStream.getSideOutput(startTag);
        DataStream<String> errorStream = splitStream.getSideOutput(errorTag);
        DataStream<String> displayStream = splitStream.getSideOutput(displayTag);
        DataStream<String> actionStream = splitStream.getSideOutput(actionTag);
        DataStream<String> videoStream = splitStream.getSideOutput(videoTag);

        // Debug output of the main (page) stream.
        splitStream.print();

        // Sink: serialize page records back to JSON strings and write to Kafka.
        splitStream.map(new MapFunction<JSONObject, String>() {
            @Override
            public String map(JSONObject jsonObject) throws Exception {
                return jsonObject.toJSONString();
            }
        }).addSink(MyKafkaUtil.getKafkaProducer("dwd_page_yk7"));

        env.execute();
    }
}
