package com.bawei.tk4;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

import java.util.Properties;

public class FlinkTM1234 {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        env.enableCheckpointing(3000);
        env.setStateBackend(new FsStateBackend("file:\\C:\\Users\\hello\\Desktop\\ck"));

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "hadoop-single:9092");
        properties.setProperty("group.id", "group1");
        FlinkKafkaConsumer<String> flinkKafkaConsumer=new FlinkKafkaConsumer<>("ods_base_log", new SimpleStringSchema(), properties);
        flinkKafkaConsumer.setStartFromEarliest();

        DataStream<String> stream = env.addSource(flinkKafkaConsumer);

        //过滤掉脏数据（空字符串、null或解析JSON字符串失败）；（5分）//算子 filter  flatmap
        SingleOutputStreamOperator<JSONObject> filterDS = stream.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String s, Collector<JSONObject> collector) throws Exception {
                if (s != null && s.length() > 0) {
                    try {
                        JSONObject jsonObject = JSON.parseObject(s);
                        collector.collect(jsonObject);
                    } catch (Exception e) {

                    }
                }
            }
        });

        //4）、（DWD层）使用sideOutput侧边流技术，将上述日志数据划分为启动日志数据、页面日志数据和曝光日志数据，并存储Kafka消费队列：dwd-page-log、dwd-start-log、dwd-display-log；（5分）
        OutputTag<JSONObject> startTag = new OutputTag<JSONObject>("start") {};
        OutputTag<JSONObject> displayTag = new OutputTag<JSONObject>("display") {};

        SingleOutputStreamOperator<JSONObject> pageDs = filterDS.process(new ProcessFunction<JSONObject, JSONObject>() {
            @Override
            public void processElement(JSONObject jsonObject, ProcessFunction<JSONObject, JSONObject>.Context context, Collector<JSONObject> collector) throws Exception {
                if (jsonObject.containsKey("start")) {
                    context.output(startTag, jsonObject);
                } else {
                    collector.collect(jsonObject);
                    if (jsonObject.containsKey("displays")) {
                        context.output(displayTag, jsonObject);
                    }
                }
            }
        });
        pageDs.print("pageDs>>>");
        DataStream<JSONObject> startDS = pageDs.getSideOutput(startTag);
        DataStream<JSONObject> displayDS = pageDs.getSideOutput(displayTag);
        startDS.print("startDS>>>");
        //displayDS.print("displayDS>>>");

        //
        FlinkKafkaProducer<String> myProducer1 = new FlinkKafkaProducer<>("dwd_page_log", new SimpleStringSchema(), properties);
        FlinkKafkaProducer<String> myProducer2 = new FlinkKafkaProducer<>("dwd_start_log", new SimpleStringSchema(), properties);
        FlinkKafkaProducer<String> myProducer3 = new FlinkKafkaProducer<>("dwd_display_log", new SimpleStringSchema(), properties);

        pageDs.map(x->x.toString()).addSink(myProducer1);
        startDS.map(x->x.toString()).addSink(myProducer2);
        displayDS.map(x->x.toString()).addSink(myProducer3);

        env.execute();
    }
}
