
import cn.hutool.core.date.DateUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.bwie.realtime.jtp.dwd.log.function.AppLogSplitProcessFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SideOutputDataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;
import org.apache.flink.util.OutputTag;
import utils.DateTimeUtil;
import utils.KafkaUtil;

import java.awt.peer.CanvasPeer;
import java.time.Instant;
import java.time.ZoneId;
import java.util.Date;

public class JtpOds {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        DataStream<String> ods_base_log = KafkaUtil.consumerKafka(env, "ODS_BASE_LOG");
        ods_base_log.print();
        DataStream<String>page=processLog(ods_base_log);
        KafkaUtil.producerKafka(page,"dwd_user_log");
        env.execute("aaa");
    }

    private static DataStream<String> processLog(DataStream<String> ods_base_log) {
//        清洗
        DataStream<String>jsonStream=appLogCleaned(ods_base_log);
//        状态编程实现访客数
        DataStream<String> etlStream =processIsNew(jsonStream);
        //数据分流
        DataStream<String>pageStream =splitStream(jsonStream);
//        返回数据流
        return ods_base_log;
    }

    private static DataStream<String> splitStream(DataStream<String> jsonStream) {
        OutputTag<String> error = new OutputTag<String>("error-log") {
        };
        OutputTag<String> start = new OutputTag<String>("start-log") {
        };
        OutputTag<String> display = new OutputTag<String>("display-log") {
        };
        OutputTag<String> action = new OutputTag<String>("action-log") {
        };
        SingleOutputStreamOperator<String> process = jsonStream.process(new AppLogSplitProcessFunction(error, start, display, action));
        DataStream<String> errorStream = process.getSideOutput(error);
        DataStream<String> startStream = process.getSideOutput(start);
        DataStream<String> displayStream = process.getSideOutput(display);
        DataStream<String> actionStream = process.getSideOutput(action);
        KafkaUtil.producerKafka(errorStream, "dwd-traffic-error-log");
        KafkaUtil.producerKafka(startStream, "dwd-traffic-start-log");
        KafkaUtil.producerKafka(displayStream, "dwd-traffic-display-log");
        KafkaUtil.producerKafka(actionStream, "dwd-traffic-action-log");
        return process;
    }

    private static DataStream<String> processIsNew(DataStream<String> jsonStream) {
//        只保留用户打开APP的那一页日志,其他日志不要
        SingleOutputStreamOperator<String> ds1 = jsonStream.filter(t -> JSON.parseObject(t).getJSONObject("page").getString("last_page_id") == null);
//        状态编程只保留每个mid每天第一次登录的数据
        SingleOutputStreamOperator<String> process = ds1.keyBy(t -> JSON.parseObject(t).getJSONObject("common").getString("mid")).process(new KeyedProcessFunction<String, String, String>() {
            ValueState<String> lastDate;

            //            该用户上一次访问的日期
            @Override
            public void open(Configuration parameters) throws Exception {
                lastDate = getRuntimeContext().getState(new ValueStateDescriptor<>("lastDate", String.class));
            }

            @Override
            public void processElement(String s, KeyedProcessFunction<String, String, String>.Context context, Collector<String> collector) throws Exception {
                Long ts = JSON.parseObject(s).getLong("ts");
                String format = DateUtil.format(new Date(ts), "yyyy-MM-dd");
                if (!format.equals(lastDate.value())) {
//                    保留这个日志
                    collector.collect(s);
//                    更新上一次访问的日期
                    lastDate.update(format);
                } else {

                }

            }
        });
        KafkaUtil.producerKafka(process, "dwd_traffic_is_new");

        return null;
    }

    private static DataStream<String> appLogCleaned(DataStream<String> ods_base_log) {
        OutputTag<String> dirty = new OutputTag<String>("dirty-log") {
        };
        SingleOutputStreamOperator<String> process = ods_base_log.process(new ProcessFunction<String, String>() {
            @Override
            public void processElement(String s, ProcessFunction<String, String>.Context context, Collector<String> collector) throws Exception {
                try {
                    JSON.parseObject(s);
                    collector.collect(s);
                } catch (Exception e) {
                    context.output(dirty, s);
                }
            }
        });
        DataStream<String> dirtyStream = process.getSideOutput(dirty);
        KafkaUtil.producerKafka(dirtyStream, "dwd_base_log_dirty");
        return process;
    }
}
