package cn.linjianhui.flink.sample;

import cn.linjianhui.flink.sample.model.OmsOrderDetail;
import cn.linjianhui.flink.sample.util.ParamUtils;
import com.alibaba.fastjson.JSON;
import cn.linjianhui.flink.sample.util.Utils;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.ContinuousProcessingTimeTrigger;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;

import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.ZoneOffset;
import java.util.Properties;

/**
 * Flink streaming job that maintains a per-brand running sales total for the
 * current day and publishes it to a Redis sorted set, enabling a top-N query
 * by sales amount (ZREVRANGE on the target key).
 *
 * Pipeline: Kafka ("OMS-Order-Ware") -> parse JSON order detail -> keep only
 * orders paid today (UTC+8) -> (brand, amount) -> daily tumbling window
 * (aligned to UTC+8 midnight) with a 10s continuous trigger -> Redis ZADD.
 */
public class TopNBrandSalesAmountJob {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = Utils.getStreamExecutionEnvironment();

        // Consumer group "topN-sales-brand"; start from earliest offsets so a
        // restarted job can rebuild today's totals from the full day's events.
        Properties props = ParamUtils.getKafkaConsumerProperty(args, "topN-sales-brand", "earliest");

        DataStream<String> sourceStream = env.addSource(new FlinkKafkaConsumer011<>("OMS-Order-Ware", new SimpleStringSchema(), props));

        DataStream<Tuple2<String, BigDecimal>> orderDetailStream = sourceStream
                .map((MapFunction<String, OmsOrderDetail>) s -> JSON.parseObject(s, OmsOrderDetail.class))
                .filter((FilterFunction<OmsOrderDetail>) detail -> {
                    // Midnight "today" in UTC+8, recomputed per record so the
                    // cutoff rolls over automatically at day boundaries.
                    long currentDayTimestamp = LocalDate.now().atStartOfDay(ZoneOffset.ofHours(8)).toInstant().toEpochMilli();
                    return detail.getPAYMENTDATE() >= currentDayTimestamp;
                })
                .map((MapFunction<OmsOrderDetail, Tuple2<String, BigDecimal>>) detail ->
                        // Go through String.valueOf so the amount is converted
                        // exactly: BigDecimal(double) would embed binary
                        // floating-point noise, which is unacceptable for money.
                        new Tuple2<>(detail.getBRANDCODE(), new BigDecimal(String.valueOf(detail.getAmount()))))
                .returns(TypeInformation.of(new TypeHint<Tuple2<String, BigDecimal>>() { }));

        DataStream<Tuple2<String, BigDecimal>> outputStream =  orderDetailStream
                .keyBy(0)
                // One-day windows offset by -8h so they align with UTC+8
                // midnight; the continuous trigger emits a fresh running total
                // every 10 seconds instead of waiting for the window to close.
                .window(TumblingProcessingTimeWindows.of(Time.days(1), Time.hours(-8)))
                .trigger(ContinuousProcessingTimeTrigger.of(Time.seconds(10)))
                // IN = (brand, amount), ACC = OUT = (brand, running total).
                .aggregate(new AggregateFunction<Tuple2<String, BigDecimal>, Tuple2<String, BigDecimal>, Tuple2<String, BigDecimal>>() {

                    @Override
                    public Tuple2<String, BigDecimal> createAccumulator() {
                        // Empty brand code marks a not-yet-populated accumulator.
                        return new Tuple2<>("", BigDecimal.ZERO);
                    }

                    @Override
                    public Tuple2<String, BigDecimal> add(Tuple2<String, BigDecimal> item, Tuple2<String, BigDecimal> accumulator) {
                        // Flink permits in-place mutation of the accumulator.
                        accumulator.f0 = item.f0;
                        accumulator.f1 = accumulator.f1.add(item.f1);
                        return accumulator;
                    }

                    @Override
                    public Tuple2<String, BigDecimal> getResult(Tuple2<String, BigDecimal> accumulator) {
                        return accumulator;
                    }

                    @Override
                    public Tuple2<String, BigDecimal> merge(Tuple2<String, BigDecimal> acc1, Tuple2<String, BigDecimal> acc2) {
                        // Carry the brand code over if acc1 is still the fresh
                        // empty accumulator; otherwise the key would be lost.
                        if (acc1.f0.isEmpty()) {
                            acc1.f0 = acc2.f0;
                        }
                        acc1.f1 = acc1.f1.add(acc2.f1);
                        return acc1;
                    }
                });

        FlinkJedisPoolConfig jedisPoolConfig = ParamUtils.getJedisPoolConfig(args);
        // ZADD into a single sorted set: member = brand code, score = running
        // sales total, so clients can read the top-N brands with ZREVRANGE.
        outputStream.addSink(new RedisSink<>(jedisPoolConfig, new RedisMapper<Tuple2<String, BigDecimal>>() {
            @Override
            public RedisCommandDescription getCommandDescription() {
                return new RedisCommandDescription(RedisCommand.ZADD, "flink-sample-sales_brand");
            }

            @Override
            public String getKeyFromData(Tuple2<String, BigDecimal> tuple2) {
                // Sorted-set member: the brand code.
                return tuple2.f0;
            }

            @Override
            public String getValueFromData(Tuple2<String, BigDecimal> tuple2) {
                // Sorted-set score: the accumulated amount, as a plain string.
                return String.valueOf(tuple2.f1);
            }
        }));

        env.execute("Brand SalesAmount Job");
    }

}
