package com.eeboot;

import com.alibaba.fastjson.JSON;
import com.eeboot.util.DateUtil;
import com.eeboot.vo.Log;
import com.eeboot.vo.LogCount;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple1;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.timestamps.AscendingTimestampExtractor;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.util.Collector;

import javax.annotation.Nullable;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class LogStream {

    /**
     * Flink streaming job: consumes filebeat JSON records from Kafka topic
     * {@code filebeat_bsd_bsd_portal}, parses each line into a {@link Log},
     * counts records per {@code profile} in 1-minute event-time tumbling
     * windows, and prints a per-window ranking of log volume by profile.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to start or execute
     */
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // Kafka consumer configuration. key/value deserializers are listed for
        // completeness; FlinkKafkaConsumer011 actually uses the supplied
        // SimpleStringSchema for deserialization.
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "172.19.80.177:9092");
        props.setProperty("zookeeper.connect", "172.19.80.177:2181");
        props.setProperty("group.id", "flink-service-group");
        props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty("auto.offset.reset", "latest");

        String topic = "filebeat_bsd_bsd_portal";

        DataStreamSource<String> dataStreamSource = env.addSource(new FlinkKafkaConsumer011<>(
                topic,
                new SimpleStringSchema(),
                props
        )).setParallelism(1);

        dataStreamSource
                .flatMap(new LogFlatMapFunction())
                .assignTimestampsAndWatermarks(new AssignerWithPeriodicWatermarks<Log>() {

                    // Allowed lateness before the watermark passes an event time.
                    private long maxOutOfOrderness = 1000 * 60;
                    private long currentTimestamp = Long.MIN_VALUE;
                    // Logs (rather than fails on) out-of-order timestamps.
                    private AscendingTimestampExtractor.MonotonyViolationHandler violationHandler = new AscendingTimestampExtractor.LoggingHandler();

                    @Nullable
                    @Override
                    public Watermark getCurrentWatermark() {
                        // NOTE(review): watermark is derived from the machine's wall
                        // clock, not from observed event timestamps — this mixes
                        // processing time into an event-time pipeline and will drop
                        // events older than (now - 60s) regardless of stream progress.
                        // Confirm this is intended before changing.
                        return new Watermark(System.currentTimeMillis() - maxOutOfOrderness);
                    }

                    @Override
                    public long extractTimestamp(Log element, long previousElementTimestamp) {
                        final long newTimestamp = element.logTimeLong;
                        if (newTimestamp >= this.currentTimestamp) {
                            this.currentTimestamp = newTimestamp;
                            return newTimestamp;
                        } else {
                            // Out-of-order event: log it but still use its own timestamp.
                            violationHandler.handleViolation(newTimestamp, this.currentTimestamp);
                            return newTimestamp;
                        }
                    }
                })
                .keyBy("profile")
                // 1-minute size with 1-minute slide == tumbling 1-minute window.
                .timeWindow(Time.minutes(1), Time.minutes(1))
                .aggregate(new CountAgg(), new WindowResultFunction())
                // Re-key by window end so LogList can collect all profiles of one window.
                .keyBy("windowEnd")
                .process(new LogList("1分钟窗口"))
                .print();

        env.execute("kafka source");
    }

}


/**
 * Parses raw filebeat JSON records into {@link Log} objects.
 *
 * <p>Each Kafka record is a JSON object whose {@code message} field may hold
 * several newline-separated log lines. Every line matching the expected log
 * layout (bracketed header fields, a millisecond timestamp, a level, and a
 * free-form payload) is emitted as one {@link Log}; non-matching lines and
 * malformed records are silently skipped so a single bad record cannot fail
 * the whole streaming job.
 */
class LogFlatMapFunction implements FlatMapFunction<String, Log> {
    private static final String DEFAULT_LOG_PATTERN = "\\[([^]]*)]\\s+\\[([^]]*)]\\s+\\[([^]]*)]\\s+\\[([^]]*)]\\s+(\\d{4}-\\d{2}-\\d{2}\\s\\d{2}:\\d{2}:\\d{2}\\.\\d{3})\\s+\\[([^]]*)]\\s+\\[([^]]*)]\\s+\\[([^]]*)]\\s+(\\w{3,5})\\s+(.*)";

    // Compiled once and shared: Pattern is immutable and thread-safe.
    private static final Pattern PATTERN = Pattern.compile(DEFAULT_LOG_PATTERN);

    @Override
    public void flatMap(String value, Collector<Log> out) throws Exception {
        final String message;
        try {
            // parseObject throws on malformed JSON and returns null for "null";
            // treat both as a record to skip rather than a job-killing failure.
            message = JSON.parseObject(value).getString("message");
        } catch (RuntimeException ignored) {
            return; // best-effort: drop unparseable Kafka records
        }
        if (StringUtils.isBlank(message)) {
            return;
        }
        for (String line : message.split("\n")) {
            Matcher matcher = PATTERN.matcher(line);
            // The pattern defines exactly 10 capture groups, so a successful
            // find() guarantees all groups are present (possibly empty strings).
            if (matcher.find()) {
                out.collect(Log.of(matcher.group(1)
                        , matcher.group(2)
                        , matcher.group(3)
                        , matcher.group(4)
                        , matcher.group(5)
                        , matcher.group(6)
                        , matcher.group(7)
                        , matcher.group(8)
                        , matcher.group(9)
                        , matcher.group(10)));
            }
        }
    }
}

/**
 * Counts {@link Log} records inside a window.
 *
 * <p>Accumulator and result are both the running count as a {@link Long}.
 * Used with {@link WindowResultFunction}, which wraps the final count with
 * its key and window bounds.
 */
class CountAgg implements AggregateFunction<Log, Long, Long> {
    @Override
    public Long createAccumulator() {
        return 0L;
    }

    @Override
    public Long add(Log value, Long accumulator) {
        // Removed leftover debug System.out.println(value) — printing every
        // record to stdout in the per-element hot path of a streaming job.
        return accumulator + 1;
    }

    @Override
    public Long getResult(Long accumulator) {
        return accumulator;
    }

    @Override
    public Long merge(Long a, Long b) {
        return a + b;
    }
}

/**
 * Attaches key and window metadata to a pre-aggregated count.
 *
 * <p>Receives the single {@code Long} produced by {@link CountAgg} for one
 * (profile, window) pair and emits it as a {@link LogCount} carrying the
 * profile, the window start/end, and the count.
 */
class WindowResultFunction implements WindowFunction<Long, LogCount, Tuple, TimeWindow> {
    @Override
    public void apply(Tuple key, TimeWindow window, Iterable<Long> input, Collector<LogCount> out) throws Exception {
        // Key was built via keyBy("profile"), so it is a Tuple1<String>.
        final String profile = ((Tuple1<String>) key).f0;
        // An incremental AggregateFunction emits exactly one value per window.
        final Long total = input.iterator().next();
        out.collect(LogCount.of(profile, window.getStart(), window.getEnd(), total));
    }
}

/**
 * Collects all {@link LogCount} results belonging to the same window (keyed
 * by {@code windowEnd}), and once the window is complete emits a formatted
 * ranking of profiles by log volume, highest first.
 */
class LogList extends KeyedProcessFunction<Tuple, LogCount, String> {

    /** Label printed in the report header (e.g. "1分钟窗口"). */
    private String tag;

    /** Per-key buffer of window results, flushed by the event-time timer. */
    private ListState<LogCount> itemState;

    public LogList(String tag) {
        this.tag = tag;
    }

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        ListStateDescriptor<LogCount> stateDescriptor = new ListStateDescriptor<LogCount>("logState-state", LogCount.class);
        itemState = getRuntimeContext().getListState(stateDescriptor);
    }

    @Override
    public void processElement(LogCount value, Context ctx, Collector<String> out) throws Exception {
        itemState.add(value);
        // Fire once the watermark passes windowEnd: all results for this
        // window have then arrived. Re-registering the same timer is a no-op.
        ctx.timerService().registerEventTimeTimer(value.windowEnd + 1);
    }

    @Override
    public void onTimer(long timestamp, OnTimerContext ctx, Collector<String> out) throws Exception {
        List<LogCount> allItems = new ArrayList<>();
        for (LogCount item : itemState.get()) {
            allItems.add(item);
        }
        itemState.clear();
        // Descending by count. Long.compare avoids the int-cast overflow bug
        // of (int)(o2.count - o1.count), which can invert the order for
        // counts differing by more than Integer.MAX_VALUE.
        allItems.sort((o1, o2) -> Long.compare(o2.count, o1.count));

        StringBuilder result = new StringBuilder();
        result.append("=================").append(tag).append("===================\n");
        result.append("timer时间: ").append(new Timestamp(timestamp - 1)).append("\n");
        result.append("系统时间：").append(new Timestamp(System.currentTimeMillis())).append("\n");
        for (int i = 0; i < allItems.size(); i++) {
            LogCount currentItem = allItems.get(i);
            // Rankings are 1-based, e.g.: No1:  profile=prod  日志量=2413
            result.append("No").append(i + 1).append(":")
                    .append("  profile=").append(currentItem.profile)
                    .append("  日志量=").append(currentItem.count)
                    .append("  窗口=").append(DateUtil.getStringFromLong("yyyy-MM-dd HH:mm:ss.SSS", currentItem.windowStart)).append("--").append(DateUtil.getStringFromLong("yyyy-MM-dd HH:mm:ss.SSS", currentItem.windowEnd))
                    .append("\n");
        }
        result.append("====================================\n\n");

        out.collect(result.toString());
    }
}