package com.zhengb.flink.study.log.analysis.stream;

import com.alibaba.fastjson.JSONObject;
import com.zhengb.flink.study.log.analysis.entity.LogMessageSink;
import com.zhengb.flink.study.log.analysis.entity.LogMessageSource;
import com.zhengb.flink.study.log.analysis.sink.LogAnaylysisSinkFunction;
import com.zhengb.flink.study.log.analysis.watermark.TimeLagWatermarkGenerator;
import com.zhengbo.flink.demo.base.constant.FlinkConstant;
import com.zhengbo.flink.demo.base.kafka.KafkaConfig;
import com.zhengbo.flink.demo.base.utils.DateUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.util.Collector;

import java.util.Collections;
import java.util.Properties;

/**
 * Flink streaming job: reads raw log lines from Kafka, parses them into
 * {@link LogMessageSource} records, and aggregates traffic per domain over
 * 1-minute event-time tumbling windows, emitting {@link LogMessageSink}
 * results to the configured sink.
 *
 * Created by zhengbo on 2020/2/6.
 */
@Slf4j
public class LogAnalysis {

    /** Kafka topic carrying the raw log messages. */
    private static final String TOPIC = "neu_car";

    /** Kafka consumer group id for this job. */
    private static final String GROUP_ID = "log-analysis-test";

    /** Tumbling window size, in seconds. */
    private static final int WINDOW_SECONDS = 60;

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Windows below are event-time windows; timestamps/watermarks are
        // assigned by TimeLagWatermarkGenerator further down.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        Properties properties = KafkaConfig.kafkaConsumerProperties(FlinkConstant.KAFKA_BROKER_LIST,
                GROUP_ID);

        FlinkKafkaConsumer011<String> kafkaConsumer011 = new FlinkKafkaConsumer011<>(
                Collections.singletonList(TOPIC), new SimpleStringSchema(), properties);
        kafkaConsumer011.setStartFromGroupOffsets();

        DataStream<LogMessageSource> logMessageSourceDataStream = env.addSource(kafkaConsumer011)
                .filter(new FilterFunction<String>() {
                    @Override
                    public boolean filter(String value) throws Exception {
                        // Drop empty/blank records before attempting to parse them.
                        return StringUtils.isNotBlank(value);
                    }
                })
                .map(new MapFunction<String, LogMessageSource>() {
                    @Override
                    public LogMessageSource map(String value) throws Exception {
                        try {
                            return JSONObject.parseObject(value, LogMessageSource.class);
                        } catch (Exception e) {
                            // A single malformed record must not fail the whole job;
                            // log it and let the following filter discard the null.
                            log.warn("Discarding unparseable log record: {}", value, e);
                            return null;
                        }
                    }
                })
                .filter(new FilterFunction<LogMessageSource>() {
                    @Override
                    public boolean filter(LogMessageSource value) throws Exception {
                        // Constant-first equalsIgnoreCase avoids an NPE when the
                        // record carries no prefix; null records come from failed parses.
                        return value != null
                                && "E".equalsIgnoreCase(value.getPrefix())
                                && StringUtils.isNotBlank(value.getDataTime());
                    }
                });

        // Assign event-time timestamps/watermarks, key by domain, and aggregate
        // per-domain traffic inside 60-second tumbling windows.
        logMessageSourceDataStream.assignTimestampsAndWatermarks(new TimeLagWatermarkGenerator())
                .keyBy(new KeySelector<LogMessageSource, String>() {
                    // Key by domain so each window aggregates one domain's records.
                    @Override
                    public String getKey(LogMessageSource value) throws Exception {
                        return value.getDomains();
                    }
                })
                .window(TumblingEventTimeWindows.of(Time.seconds(WINDOW_SECONDS)))
                .apply(new WindowFunction<LogMessageSource, LogMessageSink, String, TimeWindow>() {
                    @Override
                    public void apply(String domain, TimeWindow window, Iterable<LogMessageSource> input,
                                      Collector<LogMessageSink> out) throws Exception {
                        log.info("apply key:{}", domain);

                        long totalTraffic = 0;
                        // NOTE(review): 'time' ends up as the event time of the LAST
                        // record iterated, which for an unordered window is arbitrary;
                        // consider window.getEnd() if a stable window timestamp is wanted.
                        long time = 0;

                        for (LogMessageSource logMessageSource : input) {
                            totalTraffic += logMessageSource.getTraffic();
                            time = DateUtil.parseDateTime(logMessageSource.getDataTime()).getTime();
                        }
                        // Output format: domain, total traffic in the window, last-seen event time.
                        out.collect(new LogMessageSink(domain, totalTraffic, time));
                    }
                })
                .addSink(new LogAnaylysisSinkFunction());

        env.execute("logAnalysis");
    }
}
