package cn.linjianhui.flink.sample;

import cn.linjianhui.flink.sample.model.OmsOrderDetail;
import cn.linjianhui.flink.sample.model.WareAccumulator;
import cn.linjianhui.flink.sample.util.ParamUtils;
import com.alibaba.fastjson.JSON;
import cn.linjianhui.flink.sample.util.Utils;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.ContinuousProcessingTimeTrigger;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;
import org.apache.flink.util.StringUtils;

import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.ZoneOffset;
import java.util.Properties;

/**
 * Flink streaming job that totals order quantity and sales amount per virtual ware
 * category over a one-day tumbling window (aligned to midnight UTC+8), re-emitting
 * the running totals every 10 seconds into a Redis hash.
 *
 * Pipeline: Kafka topic "OMS-Order-Ware" → JSON parse → keep only orders paid today
 * → keyBy(VirtualCategory) → daily window → aggregate → Redis HSET.
 */
public class WareCategoryJob {
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment environment = Utils.getStreamExecutionEnvironment();

        // Consumer group "total-ware-category", starting from the earliest offset.
        final Properties consumerProps = ParamUtils.getKafkaConsumerProperty(args, "total-ware-category", "earliest");
        final DataStream<String> rawStream =
                environment.addSource(new FlinkKafkaConsumer011<>("OMS-Order-Ware", new SimpleStringSchema(), consumerProps));

        // Deserialize each JSON record and keep only orders paid on the current day.
        // Midnight is computed in UTC+8 and re-evaluated per record, so the cutoff
        // rolls forward automatically at day boundaries.
        final DataStream<OmsOrderDetail> detailStream = rawStream
                .map(new MapFunction<String, OmsOrderDetail>() {
                    @Override
                    public OmsOrderDetail map(String json) {
                        return JSON.parseObject(json, OmsOrderDetail.class);
                    }
                })
                .filter(new FilterFunction<OmsOrderDetail>() {
                    @Override
                    public boolean filter(OmsOrderDetail detail) {
                        long todayStartMillis = LocalDate.now().atStartOfDay(ZoneOffset.ofHours(8)).toInstant().toEpochMilli();
                        return detail.getPAYMENTDATE() >= todayStartMillis;
                    }
                });

        // One-day tumbling window offset by -8h so windows align with UTC+8 calendar
        // days; the continuous trigger fires intermediate results every 10 seconds.
        final DataStream<WareAccumulator> totalsStream = detailStream
                .keyBy("VirtualCategory")
                .window(TumblingProcessingTimeWindows.of(Time.days(1), Time.hours(-8)))
                .trigger(ContinuousProcessingTimeTrigger.of(Time.seconds(10)))
                .aggregate(new AggregateFunction<OmsOrderDetail, WareAccumulator, WareAccumulator>() {
                    @Override
                    public WareAccumulator createAccumulator() {
                        return new WareAccumulator();
                    }

                    @Override
                    public WareAccumulator add(OmsOrderDetail value, WareAccumulator acc) {
                        // The first contribution for a key stamps the category name
                        // onto the accumulator so the sink can use it as the hash key.
                        if (StringUtils.isNullOrWhitespaceOnly(acc.getVirtualCategory())) {
                            acc.setVirtualCategory(value.getVirtualCategory());
                        }
                        acc.addQty(value.getQTY());
                        // NOTE(review): if getAmount() returns a double, prefer
                        // BigDecimal.valueOf over the BigDecimal constructor — confirm
                        // the return type before changing.
                        acc.addSalesAmount(new BigDecimal(value.getAmount()));
                        return acc;
                    }

                    @Override
                    public WareAccumulator getResult(WareAccumulator acc) {
                        return acc;
                    }

                    @Override
                    public WareAccumulator merge(WareAccumulator left, WareAccumulator right) {
                        left.addQty(right.getQty());
                        left.addSalesAmount(right.getSalesAmount());
                        return left;
                    }
                });

        // Sink: one field per virtual category in the Redis hash
        // "flink-sample-ware_classify"; the value is the accumulator serialized as JSON.
        final FlinkJedisPoolConfig redisConfig = ParamUtils.getJedisPoolConfig(args);
        totalsStream.addSink(new RedisSink<>(redisConfig, new RedisMapper<WareAccumulator>() {
            @Override
            public RedisCommandDescription getCommandDescription() {
                return new RedisCommandDescription(RedisCommand.HSET, "flink-sample-ware_classify");
            }

            @Override
            public String getKeyFromData(WareAccumulator acc) {
                return acc.getVirtualCategory();
            }

            @Override
            public String getValueFromData(WareAccumulator acc) {
                return JSON.toJSONString(acc);
            }
        }));

        environment.execute("WareCategory Job");
    }
}
