package realtime.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import realtime.bean.UserReview;
import realtime.func.AsyncDIMFunction;
import realtime.util.ClickHouseUtil;
import realtime.util.DateFormatUtil;
import realtime.util.MyKafkaUtil;

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.nio.channels.DatagramChannel;
import java.time.Duration;
import java.util.concurrent.TimeUnit;

/**
 * @author MengX
 * @create 2023/3/27 11:25:15
 */
public class DwsUserReviewInfoWindow {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        //设置状态后端

//        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(10000L);
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/dws/220926");

        //设置HDFS用户信息
        //System.setProperty("HADOOP_USER_NAME", "atguigu");

        String topic = "dwd_review_info";
        String groupId = "dws_review";
        DataStreamSource<String> kafkaDS = env.addSource(MyKafkaUtil.getFlinkKafkaConsumer(topic, groupId));
        SingleOutputStreamOperator<JSONObject> watermakeDS = kafkaDS.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                if (value != null) {
                    out.collect(JSON.parseObject(value));
                }
            }
        }).assignTimestampsAndWatermarks(WatermarkStrategy.<JSONObject>forBoundedOutOfOrderness(Duration.ofSeconds(2)).withTimestampAssigner(new SerializableTimestampAssigner<JSONObject>() {
            @Override
            public long extractTimestamp(JSONObject element, long recordTimestamp) {
                return element.getLong("create_time");
            }
        }));
        SingleOutputStreamOperator<UserReview> flatMapDS = watermakeDS.keyBy(new KeySelector<JSONObject, Tuple2<String,String>>() {
            @Override
            public Tuple2<String, String> getKey(JSONObject value) throws Exception {
                return Tuple2.of(value.getString("course_id"),value.getString("user_id"));
            }
        }).flatMap(new RichFlatMapFunction<JSONObject, UserReview>() {
            ValueState<String> valueState;

            @Override
            public void open(Configuration parameters) throws Exception {
                ValueStateDescriptor<String> valueStateDescriptor = new ValueStateDescriptor<>("userdl", String.class);
                StateTtlConfig build = new StateTtlConfig.Builder(Time.seconds(5))
                        .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                        .build();
                valueStateDescriptor.enableTimeToLive(build);
                valueState = getRuntimeContext().getState(valueStateDescriptor);
            }

            @Override
            public void flatMap(JSONObject value, Collector<UserReview> out) throws Exception {
                String lastct = valueState.value();
                String curt = value.getString("create_time");
                String courseId = value.getString("course_id");
                long reviewStars = 0L;
                long fiveStarts = 0L;
                long uvc = 0L;
                if (lastct == null) {
                    uvc = 1L;
                    reviewStars = value.getLong("review_stars");
                    if (reviewStars == 5) fiveStarts = 1L;
                    valueState.update(curt);
                } else if (!curt.equals(lastct)) {
                    uvc = 1L;
                    reviewStars = value.getLong("review_stars");
                    if (reviewStars == 5) fiveStarts = 1L;
                    valueState.update(curt);
                }
                UserReview userReview = UserReview.builder()
                        .courseId(courseId)
                        .reviewUc(uvc)
                        .reviewStart(reviewStars)
                        .userFiveStart(fiveStarts)
                        .build();
                out.collect(userReview);
            }
        });
        SingleOutputStreamOperator<UserReview> reduce = flatMapDS.keyBy(UserReview::getCourseId).window(TumblingEventTimeWindows.of(org.apache.flink.streaming.api.windowing.time.Time.seconds(10))).reduce(new ReduceFunction<UserReview>() {
            @Override
            public UserReview reduce(UserReview value1, UserReview value2) throws Exception {
                value1.setReviewUc(value1.getReviewUc() + value2.getReviewUc());
                value1.setReviewStart(value1.getReviewStart() + value2.getReviewStart());
                value1.setUserFiveStart(value1.getUserFiveStart() + value2.getUserFiveStart());
                return value1;
            }
        }, new WindowFunction<UserReview, UserReview, String, TimeWindow>() {
            @Override
            public void apply(String s, TimeWindow window, Iterable<UserReview> input, Collector<UserReview> out) throws Exception {
                UserReview next = input.iterator().next();
                next.setUserAvg(BigDecimal.valueOf(next.getReviewStart() / next.getReviewUc()));
                next.setPositiveReview(BigDecimal.valueOf(next.getUserFiveStart() / next.getReviewUc()));
                String start = DateFormatUtil.toYmdHms(window.getStart());
                String end = DateFormatUtil.toYmdHms(window.getEnd());
                long ts = System.currentTimeMillis();
                next.setStt(start);
                next.setEnt(end);
                next.setTs(ts);
                out.collect(next);
            }
        });
        SingleOutputStreamOperator<UserReview> resultDS = AsyncDataStream.unorderedWait(reduce, new AsyncDIMFunction<UserReview>("DIM_COURSE_INFO") {
            @Override
            public void join(UserReview input, JSONObject dimInfo) {
                input.setCourseName(dimInfo.getString("COURSE_NAME"));
            }

            @Override
            public String getKey(UserReview input) {
                return input.getCourseId();
            }
        }, 60, TimeUnit.SECONDS);

        resultDS.print("resultDS>>>>>>>>");

        resultDS.addSink(ClickHouseUtil.getSinkFunction("insert into dws_user_review_info_window values(?,?,?,?,?,?,?,?)"));
        env.execute("DwsUserReviewInfoWindow");


    }
}
