package com.eeboot;

import com.alibaba.fastjson.JSON;
import com.eeboot.constant.FlinkConst;
import com.eeboot.util.KafKaUtil;
import com.eeboot.vo.Log;
import com.eeboot.vo.LogCount;

import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple1;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011;
import org.apache.flink.util.Collector;

import java.nio.charset.StandardCharsets;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class LogCounter {

	/**
	 * Entry point: consumes JSON records from a Kafka topic, extracts their
	 * {@code "message"} payload, parses each log line with a regex, counts
	 * ERROR-level lines per key inside a sliding event-time window, and writes the
	 * per-window counts back to Kafka as JSON.
	 *
	 * @param args bootstrapServers, groupId, sourceTopic, sinkTopic, parallelism,
	 *             windowSize(sec), windowSlide(sec), allowed-lateness delay(sec)
	 * @throws Exception if the Flink job fails to build or execute
	 */
	public static void main(String[] args) throws Exception {
		// Argument check.
		if (args.length != 8) {
			System.err.println("USAGE:\n"
					+ "LogCounter <bootstrapServers> <groupId> <topic> <sinkTopic> <parallelism> <windowSize(sec)> <windowSlide(sec)> <delay(sec)>"
					+ "\n"
					+ "LogCounter <bootstrapServers> <groupId> <读取topic> <输出topic> <并行度> <窗口长度(sec)> <滑动长度(sec)> <窗口等待时间(sec)>");
			return;
		}

		String bootstrapServers = args[0];
		String groupId = args[1];
		String topic = args[2];
		String sinkTopic = args[3];
		int parallelism = Integer.parseInt(args[4]);
		long windowSize = Long.parseLong(args[5]);
		long windowSlide = Long.parseLong(args[6]);
		long delay = Long.parseLong(args[7]);

		final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		Properties props = KafKaUtil.genKafkaProperties(bootstrapServers, groupId);

		env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
		// NOTE(review): job-level parallelism is pinned to 1 while only the Kafka
		// source honors the <parallelism> argument — presumably deliberate so the
		// windowed aggregation runs on a single task; confirm before changing.
		env.setParallelism(1);
		DataStreamSource<String> dataStreamSource = env.addSource(new FlinkKafkaConsumer011<>(
				topic,
				new SimpleStringSchema(),
				props
		)).setParallelism(parallelism);

		dataStreamSource
				.flatMap(new LogFlatMapFunction())
				// Event-time watermarks tolerating records up to <delay> seconds late.
				.assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor<Log>(Time.seconds(delay)) {
							@Override
							public long extractTimestamp(Log element) {
								return element.logTimeLong;
							}
						})
				.keyBy("countKey")
				.timeWindow(Time.seconds(windowSize), Time.seconds(windowSlide))
				.aggregate(new CountAgg(), new WindowResultFunction())
				.addSink(new FlinkKafkaProducer011<LogCount>(bootstrapServers, sinkTopic, new SerializationSchema<LogCount>() {
					@Override
					public byte[] serialize(LogCount element) {
						String json = element.toJsonString();
						System.out.println("===out to sink==" + json);
						// Explicit UTF-8: getBytes() without a charset uses the
						// platform default and produces environment-dependent bytes.
						return json.getBytes(StandardCharsets.UTF_8);
					}
				}));
		env.execute("log count with window:" + windowSize + "-" + windowSlide);
	}

	/**
	 * Parses one Kafka record: extracts the {@code "message"} JSON field, splits it
	 * into individual lines, and emits a {@link Log} for every line matching the
	 * expected bracketed log format. Malformed records are skipped so a single bad
	 * message cannot fail the whole streaming job.
	 */
	static class LogFlatMapFunction implements FlatMapFunction<String, Log> {
		// Ten capturing groups: 4 bracketed headers, a millisecond timestamp,
		// 3 more bracketed headers, the log level (3-5 word chars), and the
		// free-form message tail. Compiled once — Pattern is immutable/thread-safe.
		private static final Pattern LOG_PATTERN = Pattern.compile(
				"\\[([^]]*)]\\s+\\[([^]]*)]\\s+\\[([^]]*)]\\s+\\[([^]]*)]\\s+(\\d{4}-\\d{2}-\\d{2}\\s\\d{2}:\\d{2}:\\d{2}\\.\\d{3})\\s+\\[([^]]*)]\\s+\\[([^]]*)]\\s+\\[([^]]*)]\\s+(\\w{3,5})\\s+(.*)");

		@Override
		public void flatMap(String value, Collector<Log> out) throws Exception {
			final String message;
			try {
				message = JSON.parseObject(value).getString("message");
			} catch (RuntimeException e) {
				// Fastjson throws unchecked exceptions on malformed input; skip the
				// record instead of letting it kill the job.
				System.err.println("LogFlatMapFunction: skipping unparseable record: " + e);
				return;
			}
			if (StringUtils.isNotBlank(message)) {
				for (String line : message.split("\n")) {
					Matcher matcher = LOG_PATTERN.matcher(line);
					if (matcher.find()) {
						// Groups used downstream: 1 and 2 = leading bracketed headers,
						// 5 = timestamp, 9 = log level. All groups participate on a
						// successful match, so no null-checks are needed.
						out.collect(Log.of(matcher.group(1), matcher.group(2),
								matcher.group(5), matcher.group(9)));
					}
				}
			}
		}
	}

	/**
	 * Incrementally counts ERROR-level log entries; the accumulator is the running
	 * count of matching records within the window.
	 */
	static class CountAgg implements AggregateFunction<Log, Long, Long> {
		@Override
		public Long createAccumulator() {
			return 0L;
		}

		@Override
		public Long add(Log value, Long accumulator) {
			// Only ERROR-level entries contribute; long literals avoid the
			// accidental int-boxing of the original `+ (cond ? 1 : 0)`.
			return accumulator + (FlinkConst.LOG_LEVEL_ERROR.equals(value.logLevel) ? 1L : 0L);
		}

		@Override
		public Long getResult(Long accumulator) {
			return accumulator;
		}

		@Override
		public Long merge(Long a, Long b) {
			return a + b;
		}
	}

	/**
	 * Wraps the pre-aggregated window count into a {@link LogCount} carrying the
	 * grouping key and the window bounds.
	 */
	static class WindowResultFunction implements WindowFunction<Long, LogCount, Tuple, TimeWindow> {
		@Override
		public void apply(Tuple tuple, TimeWindow window, Iterable<Long> input, Collector<LogCount> out) throws Exception {
			// keyBy("countKey") yields a Tuple1<String> key.
			String countKey = ((Tuple1<String>) tuple).f0;
			// With AggregateFunction pre-aggregation the iterable holds exactly one
			// element: the final count for this window.
			Long count = input.iterator().next();
			out.collect(LogCount.of(countKey, window.getStart(), window.getEnd(), count));
		}
	}
}
