package net.bwie.realtime.jtp.dwd.log.job;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import net.bwie.realtime.jtp.dwd.log.function.DataSplitFuction;
import net.bwie.realtime.jtp.dwd.log.function.IsNewUserUpdateFution;
import net.bwie.realtime.jtp.utils.KafkaUtil;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SideOutputDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;

/**
 * DWD-layer app-log ETL job.
 *
 * <p>Pipeline: consume raw app logs from Kafka topic {@code topic-log},
 * drop non-JSON records to a dirty-data topic, correct the new-user flag
 * per device id, split err/start/display/action events to their own Kafka
 * topics via side outputs, and write the remaining page logs to
 * {@code dwd-traffic-page-logs}.
 */
public class JtoAppLogEtlJob2 {

    public static void main(String[] args) throws Exception {
        // 1. Set up the execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        // 2. Consume raw logs from Kafka.
        DataStream<String> kafkaStream = KafkaUtil.consumerKafka(env, "topic-log");
        // 3. Transform: clean, fix is-new flag, split side streams.
        DataStream<String> processedStream = processLog(kafkaStream);
        // 4. Produce the main (page-log) stream back to Kafka.
        KafkaUtil.producerKafka(processedStream, "dwd-traffic-page-logs");
        // 5. Trigger job execution.
        env.execute("JtoAppLogEtlJob2");
    }

    /**
     * Runs the full processing chain on the raw log stream.
     *
     * @param stream raw JSON log lines from Kafka
     * @return the main output stream (page logs) after cleaning, new-user
     *         correction, and side-output splitting
     */
    private static DataStream<String> processLog(DataStream<String> stream) {
        // 1. Data cleansing: keep only parseable JSON.
        DataStream<String> cleanedStream = dataClean(stream);
        // 2. Correct the is-new-user flag per device.
        DataStream<String> isNewUserStream = isNewUserUpdate(cleanedStream);
        // 3. Split err/start/display/action events into side outputs.
        DataStream<String> splitStream = dataSplit(isNewUserStream);
        splitStream.print("SplitData");
        return splitStream;
    }

    /**
     * Splits the log stream into side outputs (err/start/display/action),
     * producing each side stream to its own Kafka topic.
     *
     * @param stream cleaned log stream with corrected new-user flags
     * @return the main output stream (records not routed to a side output)
     */
    private static DataStream<String> dataSplit(DataStream<String> stream) {
        // Anonymous subclasses so the OutputTag type information survives erasure.
        final OutputTag<String> errTag = new OutputTag<String>("err-log"){};
        final OutputTag<String> startTag = new OutputTag<String>("start-log"){};
        final OutputTag<String> displayTag = new OutputTag<String>("display-log"){};
        final OutputTag<String> actionsTag = new OutputTag<String>("action-log"){};
        SingleOutputStreamOperator<String> splitStream = stream.process(
                new DataSplitFuction(errTag, startTag, displayTag, actionsTag)
        );
        // Route each side output to its dedicated Kafka topic.
        DataStream<String> errStream = splitStream.getSideOutput(errTag);
        KafkaUtil.producerKafka(errStream, "err-logs");
        DataStream<String> startStream = splitStream.getSideOutput(startTag);
        KafkaUtil.producerKafka(startStream, "start-logs");
        DataStream<String> displayStream = splitStream.getSideOutput(displayTag);
        KafkaUtil.producerKafka(displayStream, "display-logs");
        DataStream<String> actionStream = splitStream.getSideOutput(actionsTag);
        KafkaUtil.producerKafka(actionStream, "action-logs");
        return splitStream;
    }

    /**
     * Corrects the new-user flag: keys the stream by device id ({@code common.mid})
     * so the stateful {@link IsNewUserUpdateFution} can decide per device whether
     * the record really belongs to a new user.
     *
     * @param stream cleaned JSON log stream
     * @return stream with the is-new flag corrected
     */
    private static DataStream<String> isNewUserUpdate(DataStream<String> stream) {
        KeyedStream<String, String> keyedByMid = stream.keyBy(new KeySelector<String, String>() {
            @Override
            public String getKey(String record) throws Exception {
                JSONObject json = JSON.parseObject(record);
                // Key by device id: common.mid.
                return json.getJSONObject("common").getString("mid");
            }
        });
        return keyedByMid.process(new IsNewUserUpdateFution());
    }

    /**
     * Data cleansing: records that parse as JSON pass through; everything else
     * goes to a dirty-data side output that is produced to Kafka.
     *
     * @param stream raw log lines
     * @return stream containing only valid JSON records
     */
    private static DataStream<String> dataClean(DataStream<String> stream) {
        // Side-output tag for dirty (non-JSON) records.
        final OutputTag<String> dirtyLog = new OutputTag<String>("dirty-log"){};
        SingleOutputStreamOperator<String> cleanData = stream.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String value, Context ctx, Collector<String> out) throws Exception {
                try {
                    // Validate by attempting a JSON parse.
                    JSON.parseObject(value);
                    // Valid JSON: emit to the main output.
                    out.collect(value);
                } catch (Exception e) {
                    // Unparseable: route to the dirty side output instead of dropping.
                    ctx.output(dirtyLog, value);
                }
            }
        });
        // Produce the dirty records to their own Kafka topic for inspection.
        SideOutputDataStream<String> dirtyData = cleanData.getSideOutput(dirtyLog);
        KafkaUtil.producerKafka(dirtyData, "dirty-log");
        return cleanData;
    }
}