package cn.linjianhui.flink.sample;

import cn.linjianhui.flink.sample.function.OrderAggregateFunction;
import cn.linjianhui.flink.sample.function.OrderKeyedProcessFunction;
import cn.linjianhui.flink.sample.model.OmsOrder;
import cn.linjianhui.flink.sample.model.OrderAccumulator;
import cn.linjianhui.flink.sample.model.OrderOutputResult;
import cn.linjianhui.flink.sample.util.ParamUtils;
import cn.linjianhui.flink.sample.util.Utils;
import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.ContinuousProcessingTimeTrigger;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;

import java.time.LocalDate;
import java.time.ZoneOffset;
import java.util.Properties;

/**
 * Streaming job that tallies today's paid OMS orders per order source and
 * publishes the rolling aggregates to a Redis hash.
 *
 * Pipeline: Kafka topic "OMS-Order" (JSON) -> parse to {@link OmsOrder} ->
 * keep current-day orders in a paid state -> 1-day tumbling processing-time
 * window keyed by ORDERSOURCE, emitting an updated aggregate every second ->
 * {@link OrderKeyedProcessFunction} -> Redis HSET "flink-sample-pay_order".
 */
public class PayOrderJob {

	/**
	 * Day boundaries in this job are computed in UTC+8. The window offset of
	 * {@code Time.hours(-8)} below exists to align the 1-day tumbling window
	 * (which Flink anchors to UTC) to the same UTC+8 calendar day — keep the
	 * two in sync if this zone ever changes.
	 */
	private static final ZoneOffset ZONE_OFFSET = ZoneOffset.ofHours(8);

	public static void main(String[] args) throws Exception {
		StreamExecutionEnvironment env = Utils.getStreamExecutionEnvironment();

		// Kafka consumer properties for group "pay-order", reading from the earliest offset.
		Properties props = ParamUtils.getKafkaConsumerProperty(args, "pay-order", "earliest");

		DataStream<String> sourceStream = env.addSource(
				new FlinkKafkaConsumer011<>("OMS-Order", new SimpleStringSchema(), props));

		// Parse the JSON payload and keep only today's orders in a paid state.
		// NOTE(review): statuses 3 and 10 are assumed to mean "paid"/"completed" —
		// confirm against the OMS order-status definition.
		// The explicit MapFunction/FilterFunction casts are required so Flink's
		// type extraction can resolve the lambda target types.
		DataStream<OmsOrder> payOrderStream = sourceStream
				.map((MapFunction<String, OmsOrder>) s -> JSON.parseObject(s, OmsOrder.class))
				.filter((FilterFunction<OmsOrder>) omsOrder -> {
					// Recomputed per record on purpose: the job runs across
					// midnight, so "start of today" must stay current.
					long currentDayTimestamp = LocalDate.now().atStartOfDay(ZONE_OFFSET).toInstant().toEpochMilli();
					return (omsOrder.getORDERSTATUS() == 3 || omsOrder.getORDERSTATUS() == 10)
							&& omsOrder.getLASTMODIFYTIME() >= currentDayTimestamp;
				});

		// One-day tumbling window, offset -8h to align with UTC+8 calendar days
		// (see ZONE_OFFSET), re-firing the running aggregate every second.
		DataStream<OrderAccumulator> siteAggStream = payOrderStream
				.keyBy("ORDERSOURCE")
				.window(TumblingProcessingTimeWindows.of(Time.days(1), Time.hours(-8)))
				.trigger(ContinuousProcessingTimeTrigger.of(Time.seconds(1)))
				.aggregate(new OrderAggregateFunction());

		DataStream<OrderOutputResult> siteResultStream = siteAggStream
				.keyBy("orderSource")
				.process(new OrderKeyedProcessFunction());

		FlinkJedisPoolConfig jedisPoolConfig = ParamUtils.getJedisPoolConfig(args);

		// Sink: HSET flink-sample-pay_order <orderSource> <result as JSON>,
		// so each order source holds exactly one latest-result entry.
		siteResultStream.addSink(new RedisSink<>(jedisPoolConfig, new RedisMapper<OrderOutputResult>() {
			@Override
			public RedisCommandDescription getCommandDescription() {
				// Second argument is the Redis hash name used for HSET.
				return new RedisCommandDescription(RedisCommand.HSET, "flink-sample-pay_order");
			}

			@Override
			public String getKeyFromData(OrderOutputResult output) {
				return String.valueOf(output.getOrderSource());
			}

			@Override
			public String getValueFromData(OrderOutputResult output) {
				return JSON.toJSONString(output);
			}
		}));

		env.execute("OmsOrder analysis Job");
	}
}
