package cn.linjianhui.flink.sample;

import cn.linjianhui.flink.sample.model.HourlyAccumulator;
import cn.linjianhui.flink.sample.model.OmsOrder;
import cn.linjianhui.flink.sample.util.ParamUtils;
import cn.linjianhui.flink.sample.util.Utils;
import com.alibaba.fastjson.JSON;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.io.jdbc.JDBCAppendTableSink;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.ContinuousProcessingTimeTrigger;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
import org.apache.flink.types.Row;
import org.apache.flink.util.Collector;

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.time.*;
import java.util.Properties;

/**
 * Flink streaming job that aggregates the total paid-order amount per hour-of-day
 * for the current (UTC+8) calendar day and upserts the running totals into MySQL.
 *
 * <p>Pipeline: Kafka ("OMS-Order" topic, JSON) → parse → filter to today's paid
 * orders → key by {@code orderHour} → 1-day tumbling window (aligned to midnight
 * UTC+8) fired every 60 seconds → de-duplicating sum aggregate → emit a row only
 * when the hourly total changed since the last firing → JDBC upsert sink.
 */
public class HourlyPayAmountJob {
    // Column types of the sink row: statistical_date (String), hour (int), order_amount (BigDecimal).
    private static final TypeInformation<?>[] FIELD_TYPES = new TypeInformation[]{
            BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.BIG_DEC_TYPE_INFO
    };

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = Utils.getStreamExecutionEnvironment();

        Properties props = ParamUtils.getKafkaConsumerProperty(args, "hourly-pay", "earliest");

        // Raw order JSON from Kafka.
        DataStream<String> sourceStream = env.addSource(
                new FlinkKafkaConsumer011<>("OMS-Order", new SimpleStringSchema(), props));

        // Keep only orders with ORDERSTATUS == 0 (presumably "paid" — confirm against the OMS
        // status enum) that were created on or after today's midnight in UTC+8.
        DataStream<OmsOrder> payOrderStream = sourceStream
                .map((MapFunction<String, OmsOrder>) s -> JSON.parseObject(s, OmsOrder.class))
                .filter((FilterFunction<OmsOrder>) omsOrder -> {
                    // NOTE(review): LocalDate.now() uses the JVM default zone while the
                    // start-of-day is anchored to UTC+8; run the job with -Duser.timezone=GMT+8
                    // or these can disagree around midnight.
                    long startOfDayMillis = LocalDate.now()
                            .atStartOfDay(ZoneOffset.ofHours(8)).toInstant().toEpochMilli();
                    return omsOrder.getORDERSTATUS() == 0 && omsOrder.getCREATEDATE() >= startOfDayMillis;
                });

        DataStream<Row> resultStream = payOrderStream
                .keyBy("orderHour")
                // One-day processing-time window; the -8h offset aligns it to midnight UTC+8.
                // The continuous trigger fires every 60s so totals update intra-day.
                .window(TumblingProcessingTimeWindows.of(Time.days(1), Time.hours(-8)))
                .trigger(ContinuousProcessingTimeTrigger.of(Time.seconds(60)))
                .aggregate(new AggregateFunction<OmsOrder, HourlyAccumulator, HourlyAccumulator>() {

                    @Override
                    public HourlyAccumulator createAccumulator() {
                        return new HourlyAccumulator();
                    }

                    @Override
                    public HourlyAccumulator add(OmsOrder order, HourlyAccumulator accumulator) {
                        // De-duplicate by order code so replayed/duplicated Kafka records
                        // are counted at most once per window.
                        if (!accumulator.containsOrder(order.getORDERCODE())) {
                            accumulator.setOrderHour(order.getOrderHour());
                            accumulator.addOrderId(order.getORDERCODE());
                            // NOTE(review): if getNETAMT() returns a double, prefer
                            // BigDecimal.valueOf(...) — new BigDecimal(double) is inexact.
                            // Fine as-is if it returns a String. TODO confirm the type.
                            accumulator.addOrderAmount(new BigDecimal(order.getNETAMT()));
                        }
                        return accumulator;
                    }

                    @Override
                    public HourlyAccumulator getResult(HourlyAccumulator result) {
                        return result;
                    }

                    @Override
                    public HourlyAccumulator merge(HourlyAccumulator acc1, HourlyAccumulator acc2) {
                        // NOTE(review): this only merges the amount — acc2's de-duplication
                        // id set (and orderHour) are dropped, which could double-count if
                        // Flink ever merges these accumulators. HourlyAccumulator exposes no
                        // getter for its id set here; extend the model to merge it properly.
                        acc1.addOrderAmount(acc2.getOrderAmount());
                        return acc1;
                    }
                }, new ProcessWindowFunction<HourlyAccumulator, Row, Tuple, TimeWindow>() {
                    // Per-key (per-hour) last emitted amount; used to suppress rows whose
                    // total has not changed since the previous trigger firing.
                    private ValueState<BigDecimal> orderAmountState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        orderAmountState = this.getRuntimeContext().getState(new ValueStateDescriptor<>(
                                "hourly-order-amount-state", BigDecimal.class));
                    }

                    // No close() override: the previous version called orderAmountState.clear()
                    // there, but keyed state cannot be accessed without a current key — in
                    // close() no key is set, so that clear() fails at operator shutdown.

                    @Override
                    public void process(Tuple tuple, Context context, Iterable<HourlyAccumulator> iterable, Collector<Row> collector) throws Exception {
                        HourlyAccumulator acc = iterable.iterator().next();
                        // null on the first firing for this key.
                        BigDecimal previousAmount = orderAmountState.value();
                        // Compare with compareTo, not equals: BigDecimal.equals is
                        // scale-sensitive (1.0 != 1.00) and would cause spurious re-emits.
                        if (previousAmount == null || previousAmount.compareTo(acc.getOrderAmount()) != 0) {
                            collector.collect(Row.of(LocalDate.now().toString(), acc.getOrderHour(),
                                    acc.getOrderAmount().setScale(4, RoundingMode.HALF_EVEN)));
                            orderAmountState.update(acc.getOrderAmount());
                        }
                    }
                });

        // SECURITY(review): credentials and the DB endpoint are hard-coded in source;
        // move them to job parameters / a secrets store before deploying.
        // Batch size 1 keeps the dashboard fresh at the cost of one round-trip per row.
        JDBCAppendTableSink sink = JDBCAppendTableSink.builder()
                .setDrivername("com.mysql.cj.jdbc.Driver")
                .setDBUrl("jdbc:mysql://192.168.8.251:3307/rbac?characterEncoding=utf-8&serverTimezone=GMT&autoReconnect=true")
                .setUsername("root")
                .setPassword("qwe321")
                .setQuery("INSERT INTO all_order_amount VALUES (?, ?, ?) ON DUPLICATE KEY UPDATE " +
                        "statistical_date = values(statistical_date), hour = values(hour), order_amount = values(order_amount)")
                .setParameterTypes(FIELD_TYPES)
                .setBatchSize(1)
                .build();
        sink.emitDataStream(resultStream);

        env.execute("Hourly OrderAmount Job");
    }
}
