package realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import realtime.bean.TradeOrderCountWithCource;
import realtime.func.AsyncDIMFunction;
import realtime.util.ClickHouseUtil;
import realtime.util.DateFormatUtil;
import realtime.util.MyKafkaUtil;

import java.math.BigDecimal;
import java.time.Duration;
import java.util.concurrent.TimeUnit;

/**
 * @author MengX
 * @create 2023/3/25 13:55:05
 */
public class DWS_CourceOrderDetailWindow {

    /**
     * Entry point. Pipeline:
     * <ol>
     *   <li>Read order-detail JSON records from Kafka topic {@code dwd_trade_order_detail}.</li>
     *   <li>Assign event-time watermarks (2s bounded out-of-orderness, timestamp from {@code create_time}).</li>
     *   <li>Key by {@code user_id} and deduplicate order counts per user via TTL'd keyed state.</li>
     *   <li>Asynchronously enrich each bean with its course name from {@code DIM_COURSE_INFO}.</li>
     *   <li>Aggregate per course over 10-second event-time tumbling windows.</li>
     *   <li>Sink the windowed results to ClickHouse.</li>
     * </ol>
     *
     * @param args unused
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // State backend / checkpointing — disabled for local runs; re-enable for production.
//        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(10000L);
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/dws/220926");

        // HDFS user — only needed when checkpoint storage points at HDFS.
        //System.setProperty("HADOOP_USER_NAME", "atguigu");

        // Read order-detail records from Kafka.
        String topic = "dwd_trade_order_detail";
        String groupId = "dwd_trade_order_detail_window";
        DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtil.getFlinkKafkaConsumer(topic, groupId));

        SingleOutputStreamOperator<TradeOrderCountWithCource> beanDs = kafkaDS
                .flatMap(new FlatMapFunction<String, JSONObject>() {
                    @Override
                    public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                        // FIX: fastjson's parseObject returns null for empty/blank input, which
                        // the old "value != null" guard let through and which would NPE in the
                        // timestamp assigner below. Emit only real objects.
                        if (value != null) {
                            JSONObject obj = JSON.parseObject(value);
                            if (obj != null) {
                                out.collect(obj);
                            }
                        }
                    }
                })
                // Event-time watermarks: tolerate up to 2 seconds of out-of-order records.
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                        .withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
                            @Override
                            public long extractTimestamp(JSONObject element, long recordTimestamp) {
                                // NOTE(review): assumes create_time is always present and numeric
                                // (epoch millis); a missing field would NPE on unboxing — confirm
                                // the upstream DWD layer guarantees it.
                                return element.getLong("create_time");
                            }
                        }))
                .keyBy(line -> line.getString("user_id"))
                // Convert to the DWS bean, counting each user at most once per create_time.
                .map(new RichMapFunction<JSONObject, TradeOrderCountWithCource>() {
                    // Last create_time seen for the current user; expires 5s after last write.
                    ValueState<String> valueState;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        ValueStateDescriptor<String> vaD = new ValueStateDescriptor<>("userct", String.class);
                        // FIX: use the static factory instead of the deprecated Builder constructor.
                        StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.seconds(5))
                                .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                                .build();
                        vaD.enableTimeToLive(ttlConfig);
                        valueState = getRuntimeContext().getState(vaD);
                    }

                    @Override
                    public TradeOrderCountWithCource map(JSONObject value) throws Exception {
                        String last = valueState.value();
                        String curt = value.getString("create_time");
                        BigDecimal originAmount = value.getBigDecimal("origin_amount");
                        String courseId = value.getString("course_id");

                        // FIX: the original had two branches (state empty / state changed) with
                        // identical bodies — merged into one condition. Count the user once per
                        // distinct create_time within the state TTL.
                        long uc = 0L;
                        if (last == null || !curt.equals(last)) {
                            uc = 1L;
                            valueState.update(curt);
                        }

                        // Window bounds (stt/ent) and ts are filled in by the window function.
                        return new TradeOrderCountWithCource("", "", courseId, "", uc, originAmount, null);
                    }
                });

        // Async DIM lookup: enrich each bean with its course name from DIM_COURSE_INFO
        // (unordered, 60s timeout per request).
        SingleOutputStreamOperator<TradeOrderCountWithCource> courseName = AsyncDataStream.unorderedWait(
                beanDs,
                new AsyncDIMFunction<TradeOrderCountWithCource>("DIM_COURSE_INFO") {
                    @Override
                    public void join(TradeOrderCountWithCource input, JSONObject dimInfo) {
                        input.setCourseName(dimInfo.getString("COURSE_NAME"));
                    }

                    @Override
                    public String getKey(TradeOrderCountWithCource input) {
                        return input.getCourseId();
                    }
                },
                60, TimeUnit.SECONDS);

        // Aggregate per course over 10-second event-time tumbling windows.
        SingleOutputStreamOperator<TradeOrderCountWithCource> resultDS = courseName
                .keyBy(TradeOrderCountWithCource::getCourseId)
                .window(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10)))
                .reduce(new ReduceFunction<TradeOrderCountWithCource>() {
                    @Override
                    public TradeOrderCountWithCource reduce(TradeOrderCountWithCource value1, TradeOrderCountWithCource value2) throws Exception {
                        // Incremental aggregation: fold counts and amounts into value1 in place.
                        value1.setOrderUc(value1.getOrderUc() + value2.getOrderUc());
                        value1.setOrderAmount(value1.getOrderAmount().add(value2.getOrderAmount()));
                        return value1;
                    }
                }, new WindowFunction<TradeOrderCountWithCource, TradeOrderCountWithCource, String, TimeWindow>() {
                    @Override
                    public void apply(String s, TimeWindow window, Iterable<TradeOrderCountWithCource> input, Collector<TradeOrderCountWithCource> out) throws Exception {
                        // Stamp the single pre-reduced record with window bounds and emit time.
                        String start = DateFormatUtil.toYmdHms(window.getStart());
                        String end = DateFormatUtil.toYmdHms(window.getEnd());
                        long ts = System.currentTimeMillis();
                        TradeOrderCountWithCource next = input.iterator().next();
                        next.setEnt(end);
                        next.setStt(start);
                        next.setTs(ts);
                        out.collect(next);
                    }
                });

        resultDS.print("resultDS>>>>>>>>>>>>>>>>>>>>>>>>>");
        // Sink the windowed aggregates to ClickHouse.
        resultDS.addSink(ClickHouseUtil.getSinkFunction("insert into dws_cource_order_detail_window values(?,?,?,?,?,?,?)"));

        env.execute("DWS_CourceOrderDetailWindow");
    }
}
