package com.retailersv1.ads;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.retailersv1.ads.ben.TradeOrderBean;
import common.functions.DorisMapFunction;
import common.utils.ConfigUtils;
import common.utils.DateFormatUtil;
import common.utils.KafkaUtils;
import lombok.SneakyThrows;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.Date;
import java.util.Iterator;

/**
 * DWS job: trade order window aggregation.
 *
 * <p>Reads order-detail records from Kafka (dwd_trade_order_detail), cleans them,
 * uses keyed state to flag each user's first order of the day / first order ever,
 * aggregates the unique-user and new-user order counts in 5-second tumbling
 * event-time windows, and maps the result into the Doris row format.
 */
public class dws_trade_order_window {

    private static final String kafka_botstrap_servers = ConfigUtils.getString("kafka.bootstrap.servers");
    private static final String topic_dwd_trade_order_detail = ConfigUtils.getString("dwd.trade.order.detail");
    // Stable consumer-group id. The original passed `new Date().toString()`, which
    // produced a brand-new group (whose name contains spaces) on every run, so
    // committed offsets were never reused and the job always re-read from earliest.
    private static final String CONSUMER_GROUP_ID = "dws_trade_order_window";

    @SneakyThrows
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Read order-detail records from Kafka.
        DataStreamSource<String> kafkaDbSource = env.fromSource(KafkaUtils.buildKafkaSource(
                        kafka_botstrap_servers,
                        topic_dwd_trade_order_detail,
                        CONSUMER_GROUP_ID,
                        OffsetsInitializer.earliest()),
                WatermarkStrategy.noWatermarks(), "kafka-mysql-db-source");

        // Data cleaning: keep only parseable JSON carrying a non-empty user_id and a
        // positive ts; silently drop everything else.
        SingleOutputStreamOperator<JSONObject> kafkaDbSourceClear = kafkaDbSource.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String s, Collector<JSONObject> collector) {
                try {
                    JSONObject jsonObject = JSON.parseObject(s);
                    Long ts = jsonObject.getLong("ts");
                    String userId = jsonObject.getString("user_id");
                    // Null-safe checks: the original called user_id.isEmpty() and
                    // unboxed ts directly, which threw NPE when either field was absent.
                    if (userId != null && !userId.isEmpty() && ts != null && ts > 0) {
                        collector.collect(jsonObject);
                    }
                } catch (Exception ignored) {
                    // Malformed record — skip it. Rethrowing here (as the original did)
                    // would fail the entire job on a single dirty message, defeating
                    // the purpose of a cleaning step.
                }
            }
        });

        // Assign event-time watermarks (ts arrives in seconds -> convert to millis),
        // key by user, and emit one bean per user per day:
        //   orderUniqueUserCount = 1 for the user's first order of the day,
        //   orderNewUserCount    = 1 only for the user's first order ever.
        SingleOutputStreamOperator<TradeOrderBean> kafkaDbSourceClearProcess = kafkaDbSourceClear.assignTimestampsAndWatermarks(WatermarkStrategy
                        .<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(5))
                        .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                            @Override
                            public long extractTimestamp(JSONObject jsonObject, long recordTimestamp) {
                                return jsonObject.getLong("ts") * 1000;
                            }
                        }).withIdleness(Duration.ofSeconds(5)))
                .keyBy(x -> x.getString("user_id"))
                .process(new KeyedProcessFunction<String, JSONObject, TradeOrderBean>() {
                    // Last order date ("yyyy-MM-dd") seen for this user; null = never ordered.
                    private ValueState<String> lastOrderDateState;

                    @Override
                    public void open(Configuration parameters) {
                        // State name kept as "state" so restores from existing savepoints still match.
                        lastOrderDateState = getRuntimeContext().getState(
                                new ValueStateDescriptor<>("state", String.class));
                    }

                    @Override
                    public void processElement(JSONObject jsonObject, KeyedProcessFunction<String, JSONObject, TradeOrderBean>.Context context, Collector<TradeOrderBean> collector) throws Exception {
                        long tsMillis = jsonObject.getLong("ts") * 1000;
                        String lastOrderDate = lastOrderDateState.value();
                        String today = DateFormatUtil.tsToDate(tsMillis);
                        if (!today.equals(lastOrderDate)) {
                            // First order of the day for this user; also a brand-new
                            // user when no date was ever recorded in state.
                            long newUserCount = lastOrderDate == null ? 1L : 0L;
                            lastOrderDateState.update(today);
                            collector.collect(TradeOrderBean.builder()
                                    .orderUniqueUserCount(1L)
                                    .orderNewUserCount(newUserCount)
                                    .ts(tsMillis)
                                    .build());
                        }
                    }
                });

        // 5-second tumbling event-time windows over all keys; sum the counts, then
        // stamp each result with window start/end and the current processing date.
        SingleOutputStreamOperator<TradeOrderBean> kafkaDbSourceReduce = kafkaDbSourceClearProcess.windowAll(TumblingEventTimeWindows.of(Time.seconds(5)))
                .reduce(new ReduceFunction<TradeOrderBean>() {
                    @Override
                    public TradeOrderBean reduce(TradeOrderBean t1, TradeOrderBean t2) {
                        t1.setOrderUniqueUserCount(t1.getOrderUniqueUserCount() + t2.getOrderUniqueUserCount());
                        t1.setOrderNewUserCount(t1.getOrderNewUserCount() + t2.getOrderNewUserCount());
                        return t1;
                    }
                }, new AllWindowFunction<TradeOrderBean, TradeOrderBean, TimeWindow>() {
                    @Override
                    public void apply(TimeWindow timeWindow, Iterable<TradeOrderBean> iterable, Collector<TradeOrderBean> collector) {
                        String windowStart = DateFormatUtil.tsToDateTime(timeWindow.getStart());
                        String windowEnd = DateFormatUtil.tsToDateTime(timeWindow.getEnd());
                        String curDate = DateFormatUtil.tsToDate(System.currentTimeMillis());
                        for (TradeOrderBean bean : iterable) {
                            bean.setStt(windowStart);
                            bean.setEdt(windowEnd);
                            bean.setCurDate(curDate);
                            collector.collect(bean);
                        }
                    }
                });

        // Serialize beans into the Doris row format.
        // TODO(review): the mapped stream is never attached to a sink — nothing is
        // actually written to Doris. Append e.g. `.sinkTo(dorisSink)` here.
        kafkaDbSourceReduce.map(new DorisMapFunction<>());

        env.execute();
    }
}
