package com.atguigu.app.dws;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.app.func.DimAsyncFunction;
import com.atguigu.bean.TestBean;
import com.atguigu.util.MyClickHouseUtil;
import com.atguigu.util.DateFormatUtil;
import com.atguigu.util.KafkaUtil;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;

import org.apache.flink.streaming.api.datastream.AsyncDataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.AllWindowFunction;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.util.Collector;

import java.time.Duration;
import java.util.concurrent.TimeUnit;

public class DwsTestTestExam {

    /**
     * DWS job: consumes exam-detail records from the Kafka topic
     * {@code dwd_test_test_exam}, keeps only complete records, and aggregates
     * per 10-second event-time tumbling window the number of test takers plus
     * their average score and average duration. Results are currently printed;
     * the ClickHouse sink is not wired up yet.
     */
    public static void main(String[] args) throws Exception {
        // TODO 1 Prepare the stream execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2 Checkpointing / state backend configuration (disabled in dev).
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
         */

        // TODO 3 Read the source records from Kafka.
        String topicName = "dwd_test_test_exam";
        String groupID = "dws_test_test_exam";
        DataStreamSource<String> testStream = env.addSource(KafkaUtil.getKafkaConsumer(topicName, groupID));

        // TODO 4 Convert to JSON structure.
        // TODO 5 Filter out incomplete data.
        // This stage is a best-effort filter: malformed JSON and records
        // without a user_id are dropped instead of failing the job.
        SingleOutputStreamOperator<JSONObject> jsonObjStream = testStream.flatMap(new FlatMapFunction<String, JSONObject>() {
            @Override
            public void flatMap(String value, Collector<JSONObject> out) throws Exception {
                try {
                    JSONObject jsonObject = JSON.parseObject(value);
                    // parseObject may return null (e.g. for empty input); guard it.
                    if (jsonObject != null && jsonObject.getString("user_id") != null) {
                        out.collect(jsonObject);
                    }
                } catch (Exception ignored) {
                    // Dirty record: skip it rather than crash the pipeline.
                }
            }
        });

        // TODO 6 Key the stream by paper id.
        KeyedStream<JSONObject, String> keyedStream = jsonObjStream.keyBy(new KeySelector<JSONObject, String>() {
            @Override
            public String getKey(JSONObject value) throws Exception {
                return value.getString("paper_id");
            }
        });

        // Map each record to a TestBean accumulator: one person, with the
        // record's own score/duration as the running averages.
        SingleOutputStreamOperator<TestBean> beanStream = keyedStream.map(new MapFunction<JSONObject, TestBean>() {
            @Override
            public TestBean map(JSONObject jsonObject) throws Exception {
                return TestBean.builder()
                        .testPersonCount(1L)
                        .avgScore(jsonObject.getDouble("score"))
                        .avgDuringTime(jsonObject.getDouble("duration_sec"))
                        // "ts" appears to be epoch seconds; convert to millis
                        // for Flink event time — TODO confirm against producer.
                        .ts(jsonObject.getLong("ts") * 1000)
                        .build();
            }
        });

        // Assign event-time watermarks with 2 s bounded out-of-orderness.
        SingleOutputStreamOperator<TestBean> beanWithWatermarkStream = beanStream.assignTimestampsAndWatermarks(WatermarkStrategy.<TestBean>forBoundedOutOfOrderness(Duration.ofSeconds(2L))
                .withTimestampAssigner(new SerializableTimestampAssigner<TestBean>() {
                    @Override
                    public long extractTimestamp(TestBean testBean, long l) {
                        return testBean.getTs();
                    }
                }));

        // TODO 7 Aggregate per 10-second tumbling event-time window.
        SingleOutputStreamOperator<TestBean> reduceBeanStream = beanWithWatermarkStream.windowAll(TumblingEventTimeWindows.of(Time.seconds(10))).reduce(new ReduceFunction<TestBean>() {
            @Override
            public TestBean reduce(TestBean testBean, TestBean t1) throws Exception {
                // Capture both counts BEFORE mutating the accumulator: the
                // original code updated testPersonCount first, which made the
                // left average weighted by the combined total and the
                // denominator double-count t1. Both sides must be weighted by
                // their own counts (t1 may itself be a merged accumulator).
                long leftCount = testBean.getTestPersonCount();
                long rightCount = t1.getTestPersonCount();
                long totalCount = leftCount + rightCount;

                testBean.setAvgDuringTime(
                        (testBean.getAvgDuringTime() * leftCount + t1.getAvgDuringTime() * rightCount) / totalCount);
                testBean.setAvgScore(
                        (testBean.getAvgScore() * leftCount + t1.getAvgScore() * rightCount) / totalCount);
                testBean.setTestPersonCount(totalCount);

                return testBean;
            }
        }, new AllWindowFunction<TestBean, TestBean, TimeWindow>() {
            @Override
            public void apply(TimeWindow timeWindow, Iterable<TestBean> iterable, Collector<TestBean> collector) throws Exception {
                // reduce() emits exactly one pre-aggregated element per window;
                // stamp it with the window bounds and the processing time.
                TestBean testBean = iterable.iterator().next();
                testBean.setStt(DateFormatUtil.toYmdHms(timeWindow.getStart()));
                testBean.setEdt(DateFormatUtil.toYmdHms(timeWindow.getEnd()));
                testBean.setTs(System.currentTimeMillis());
                collector.collect(testBean);
            }
        });
        reduceBeanStream.print("reduce>>");

        // TODO 10 Write the aggregated rows to ClickHouse (not implemented yet;
        // see MyClickHouseUtil for the JDBC sink helper).

        // TODO 11 Execute the job.
        env.execute();
    }
}
