package com.navinfo.platform.trip.analysis.flink;

import com.navinfo.platform.trip.analysis.flink.config.AlgorithmConfig;
import com.navinfo.platform.trip.analysis.flink.config.RedisConfig;
import com.navinfo.platform.trip.analysis.flink.config.SystemConfig;
import com.navinfo.platform.trip.analysis.flink.watermark.BoundedOutOfOrdernessWatermarks;
import com.navinfo.platform.trip.analysis.flink.window.TripWindowEvictor;
import com.navinfo.platform.trip.analysis.flink.window.TripWindowFunction;
import com.navinfo.platform.trip.analysis.flink.sink.KafkaProducer;
import com.navinfo.platform.trip.analysis.flink.source.KafkaConsumer;
import com.navinfo.platform.trip.analysis.flink.window.TripWindowTrigger;
import com.navinfo.platform.trip.analysis.pojo.DF_OuterStatisticData;
import com.navinfo.platform.trip.common.arithmetic.data.OuterEventData;
import com.navinfo.platform.trip.common.enums.SystemTypeEnum;
import com.navinfo.platform.trip.common.pojo.PointProtocol;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.java.typeutils.PojoTypeInfo;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.GlobalWindows;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.util.OutputTag;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;
import java.util.Arrays;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * Driving-behavior (trip) analysis — real-time streaming version.
 *
 * <p>Reads GPS point data from two Kafka topics (historical "location" and
 * "realtime"), sanity-filters the points, unions the streams keyed by terminal
 * id, and processes them in a trigger/evictor-driven {@code GlobalWindow} to
 * produce per-trip statistics (main output) and trip events (side output),
 * both written back to Kafka.
 */
public class TripAnalysisStreaming {
	private static final Logger logger = LoggerFactory.getLogger(TripAnalysisStreaming.class);

	/** Maximum plausible speed in km/h; faster points are dropped as GPS noise. */
	private static final double MAX_SPEED_KMH = 200;
	/** Points whose GPS time is more than this far in the future are dropped. */
	private static final long ONE_HOUR_MS = 60 * 60 * 1000L;

	/**
	 * System type: 1 = Dongfeng, 2 = Qingqi, 3 = FAW.
	 */
	public static SystemTypeEnum systemType;

	public static void main(String[] args) throws Exception {
		// NOTE(review): these lifecycle messages were logged at ERROR level in the
		// original; they are informational, so INFO is the appropriate level. If the
		// deployment's log config only emits ERROR, revert or adjust the config.
		logger.info(">>>>>>实时驾驶行为分析程序开始启动>>>>>>");

		// Load system parameters and initialize the execution environment.
		ParameterTool config = SystemConfig.load(args);
		StreamExecutionEnvironment env = SystemConfig.init(config);

		// Merge algorithm + redis parameters and register them as global job
		// parameters so operators (trigger/evictor/window function) can read them.
		ParameterTool algorithmConfig = AlgorithmConfig.load(args);
		ParameterTool redisConfig = RedisConfig.load(args);
		ParameterTool jobConfig = config.mergeWith(algorithmConfig).mergeWith(redisConfig);
		env.getConfig().setGlobalJobParameters(jobConfig);

		// Kafka sources: historical location data and realtime data.
		KafkaConsumer kafkaConsumer = new KafkaConsumer(args);
		FlinkKafkaConsumer<PointProtocol> consumerLocation = kafkaConsumer.locationData();
		FlinkKafkaConsumer<PointProtocol> consumerRealtime = kafkaConsumer.realTimeData();

		// Kafka sinks: trip statistics (main output) and trip events (side output).
		KafkaProducer kafkaProducer = new KafkaProducer(args);
		FlinkKafkaProducer<DF_OuterStatisticData> producerTripStatistic = kafkaProducer.tripStatistic();
		FlinkKafkaProducer<OuterEventData> producerTripEvent = kafkaProducer.tripEvent();

		// Side-output tag for trip events emitted by the window function.
		OutputTag<OuterEventData> eventOutputTag = new OutputTag<>("trip_event_output", PojoTypeInfo.of(OuterEventData.class));

		// Source operators; parallelism matches the topic partition count so every
		// partition gets a dedicated consumer subtask.
		DataStream<PointProtocol> sourceLocation = env.addSource(consumerLocation).setParallelism(kafkaConsumer.locationPartitionSize()).name("source_kafka_location").uid("source_kafka_location");
		DataStream<PointProtocol> sourceRealtime = env.addSource(consumerRealtime).setParallelism(kafkaConsumer.realTimePartitionSize()).name("source_kafka_realtime").uid("source_kafka_realtime");

		// Drop points with tid=0, speed above MAX_SPEED_KMH, or a GPS timestamp
		// more than one hour in the future (clock-skew / corrupt data).
		DataStream<PointProtocol> filterLocation = sourceLocation.filter(TripAnalysisStreaming::isValidPoint).name("source_location_filter").uid("source_location_filter");
		DataStream<PointProtocol> filterRealtime = sourceRealtime.filter(TripAnalysisStreaming::isValidPoint).name("source_realtime_filter").uid("source_realtime_filter");

		// Optional allow-list of terminal ids (comma-separated "tids" property).
		// The original crashed with NumberFormatException on a blank value; a blank
		// list now means "no extra filtering", same as an absent property.
		String tids = config.get("tids");
		if (tids != null && !tids.trim().isEmpty()) {
			Set<Long> tidSet = Arrays.stream(tids.split(","))
					.map(String::trim)
					.map(Long::parseLong)
					.collect(Collectors.toSet());
			filterLocation = filterLocation.filter(pointProtocol -> tidSet.contains(pointProtocol.getTid())).name("location_filter_tid").uid("location_filter_tid");
			filterRealtime = filterRealtime.filter(pointProtocol -> tidSet.contains(pointProtocol.getTid())).name("realtime_filter_tid").uid("realtime_filter_tid");
		}

		// Assign event time (GPS time) and the shared watermark strategy to both streams.
		filterLocation = filterLocation.assignTimestampsAndWatermarks(watermarkStrategy());
		filterRealtime = filterRealtime.assignTimestampsAndWatermarks(watermarkStrategy());

		// Union both streams, key by terminal id, and process in a GlobalWindow
		// whose lifecycle is driven entirely by the custom trigger and evictor.
		SingleOutputStreamOperator<DF_OuterStatisticData> tripOutput = filterRealtime.union(filterLocation)
			.keyBy(PointProtocol::getTid)
			.window(GlobalWindows.create())
			.trigger(new TripWindowTrigger(jobConfig))
			.evictor(new TripWindowEvictor(jobConfig))
			.process(new TripWindowFunction(eventOutputTag))
			.name("trip_statistic_transform")
			.uid("trip_statistic_transform");

		// Main output: trip statistics to Kafka.
		tripOutput.addSink(producerTripStatistic).name("sink_kafka_tripStatistic").uid("sink_kafka_tripStatistic");

		// Side output: trip events to Kafka.
		tripOutput.getSideOutput(eventOutputTag).addSink(producerTripEvent).name("sink_kafka_tripEvent").uid("sink_kafka_tripEvent");

		// Submit the job. NOTE(review): execute() blocks until the streaming job
		// terminates, so the log line below only prints on shutdown — confirm this
		// is the intended behavior.
		env.execute("Trip_Analysis_Streaming");
		logger.info(">>>>>>实时驾驶行为分析程序开始运行>>>>>>");
	}

	/**
	 * Basic sanity filter applied to every incoming GPS point.
	 *
	 * @param point incoming point
	 * @return {@code true} when the point has a non-zero terminal id, a plausible
	 *         speed, and a GPS timestamp no more than one hour in the future
	 */
	private static boolean isValidPoint(PointProtocol point) {
		return 0 != point.getTid()
				&& point.getSpeedKmh() <= MAX_SPEED_KMH
				&& point.getGpsTime() < (System.currentTimeMillis() + ONE_HOUR_MS);
	}

	/**
	 * Watermark strategy shared by both input streams: bounded out-of-orderness of
	 * 10 seconds (with a 1-day jump threshold handled by the custom generator),
	 * event time taken from the point's GPS time, and sources marked idle after
	 * 5 minutes without data so a quiet partition does not stall watermarks.
	 */
	private static WatermarkStrategy<PointProtocol> watermarkStrategy() {
		return WatermarkStrategy
				.<PointProtocol>forGenerator(ctx -> new BoundedOutOfOrdernessWatermarks<>(Duration.ofSeconds(10), Duration.ofDays(1)))
				.withTimestampAssigner((element, recordTimestamp) -> element.getGpsTime())
				.withIdleness(Duration.ofMinutes(5));
	}

}
