package com.atguigu.edu.realtime.dws.app;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.common.base.BaseApp;
import com.atguigu.edu.realtime.common.bean.ExamScorePartitionBean;
import com.atguigu.edu.realtime.common.constant.Constant;
import com.atguigu.edu.realtime.common.function.BeanToJsonStrMapFunction;
import com.atguigu.edu.realtime.common.util.DateFormatUtil;
import com.atguigu.edu.realtime.common.util.FlinkSinkUtil;
import com.atguigu.edu.realtime.common.util.HBaseUtil;
import com.atguigu.edu.realtime.common.util.SqlUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;
import org.apache.hadoop.hbase.client.Connection;

/**
 * DWS job: counts exam papers' answer scores per score bucket
 * ([0,20), [20,40), [40,60), [60,80), [80,+)) over 10-second tumbling
 * event-time windows, enriches each window result with the paper title
 * from the HBase dimension table, and writes the result to Doris.
 *
 * <p>Source: Kafka topic {@link Constant#TOPIC_DWD_EXAMINATION_ANSWER_DETAIL};
 * sink: Doris table {@code dws_exam_score_partition_window}.
 */
public class DwsExamScorePartitionWindow extends BaseApp {

    public static void main(String[] args) {
        new DwsExamScorePartitionWindow().start(13002, 4, "DwsExamScorePartitionWindow", Constant.TOPIC_DWD_EXAMINATION_ANSWER_DETAIL);
    }

    @Override
    public void handle(StreamExecutionEnvironment env, DataStreamSource<String> kafkaStrDS) {
        // Parse raw Kafka JSON strings into JSONObjects.
        SingleOutputStreamOperator<JSONObject> jsonStream = kafkaStrDS.map(JSON::parseObject);

        // Assign event-time watermarks; "ts" is in seconds, Flink expects milliseconds.
        // NOTE(review): forMonotonousTimestamps assumes per-partition ordered timestamps — confirm upstream guarantees this.
        SingleOutputStreamOperator<JSONObject> wmStream = jsonStream.assignTimestampsAndWatermarks(
                WatermarkStrategy.<JSONObject>forMonotonousTimestamps().withTimestampAssigner(
                        (v, ts) -> v.getLong("ts") * 1000
                )
        );

        // Map each answer record to a bean with exactly one score bucket set to 1.
        SingleOutputStreamOperator<ExamScorePartitionBean> beanStream = wmStream.process(
                new ProcessFunction<JSONObject, ExamScorePartitionBean>() {
                    @Override
                    public void processElement(JSONObject jsonObject, ProcessFunction<JSONObject, ExamScorePartitionBean>.Context context, Collector<ExamScorePartitionBean> collector) throws Exception {
                        ExamScorePartitionBean build = new ExamScorePartitionBean().toBuilder().build();
                        build.setPaperId(jsonObject.getString("paper_id"));
                        build.setTs(jsonObject.getLong("ts"));
                        Double score = jsonObject.getDouble("score");
                        // Bucket the score; buckets are mutually exclusive.
                        // BUG FIX: the [20,40) bucket was missing, so those scores
                        // were miscounted into up0 while reduce() summed an always-zero up20.
                        if (score >= 80) {
                            build.setUp80(1L);
                        } else if (score >= 60) {
                            build.setUp60(1L);
                        } else if (score >= 40) {
                            build.setUp40(1L);
                        } else if (score >= 20) {
                            build.setUp20(1L);
                        } else {
                            build.setUp0(1L);
                        }
                        collector.collect(build);
                    }
                }
        );

        // Aggregate bucket counts per paper over 10s tumbling event-time windows.
        SingleOutputStreamOperator<ExamScorePartitionBean> resultStream = beanStream
                .keyBy(ExamScorePartitionBean::getPaperId)
                .window(TumblingEventTimeWindows.of(Time.seconds(10)))
                .reduce(
                        new ReduceFunction<ExamScorePartitionBean>() {
                            @Override
                            public ExamScorePartitionBean reduce(ExamScorePartitionBean acc, ExamScorePartitionBean in) throws Exception {
                                // BUG FIX: up60 was never accumulated, dropping its
                                // counts for every element after the first in a window.
                                acc.setUp0(acc.getUp0() + in.getUp0());
                                acc.setUp20(acc.getUp20() + in.getUp20());
                                acc.setUp40(acc.getUp40() + in.getUp40());
                                acc.setUp60(acc.getUp60() + in.getUp60());
                                acc.setUp80(acc.getUp80() + in.getUp80());
                                return acc;
                            }
                        },
                        new WindowFunction<ExamScorePartitionBean, ExamScorePartitionBean, String, TimeWindow>() {
                            @Override
                            public void apply(String paperId, TimeWindow timeWindow, Iterable<ExamScorePartitionBean> iterable, Collector<ExamScorePartitionBean> collector) throws Exception {
                                // Reduce guarantees exactly one element per fired window.
                                ExamScorePartitionBean bean = iterable.iterator().next();
                                bean.setStt(DateFormatUtil.tsToDateTime(timeWindow.getStart()));
                                bean.setEdt(DateFormatUtil.tsToDateTime(timeWindow.getEnd()));
                                bean.setCurDate(DateFormatUtil.tsToDate(timeWindow.getStart()));
                                collector.collect(bean);
                            }
                        }
                );

        // Enrich each window result with the paper title from the HBase dim table.
        SingleOutputStreamOperator<ExamScorePartitionBean> withPaperNameInfoDS = resultStream.map(
                new RichMapFunction<ExamScorePartitionBean, ExamScorePartitionBean>() {
                    private Connection hbaseConn;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        hbaseConn = HBaseUtil.getHbaseConnection();
                    }

                    @Override
                    public void close() throws Exception {
                        HBaseUtil.closeHBaseConnection(hbaseConn);
                    }

                    @Override
                    public ExamScorePartitionBean map(ExamScorePartitionBean orderBean) throws Exception {
                        // Look up the dimension row by the paper id key.
                        String paperId = orderBean.getPaperId();
                        JSONObject dimJsonObj = HBaseUtil.readRow(hbaseConn, Constant.HBASE_NAMESPACE, "dim_test_paper", paperId, JSONObject.class);
                        // BUG FIX: guard against a missing dim row — previously an NPE
                        // would fail the whole job for one unresolvable paper id.
                        if (dimJsonObj != null) {
                            orderBean.setPaperName(dimJsonObj.getString("paper_title"));
                        }
                        return orderBean;
                    }
                }
        );
        withPaperNameInfoDS.print();

        // Serialize beans to JSON and sink into Doris.
        withPaperNameInfoDS.map(new BeanToJsonStrMapFunction())
                .sinkTo(FlinkSinkUtil.getDorisSink("dws_exam_score_partition_window"));
    }
}
