package com.atguigu.edu.realtime.app.dws;

import com.alibaba.druid.pool.DruidPooledConnection;
import com.alibaba.druid.util.StringUtils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.atguigu.edu.realtime.bean.TrafficPageViewBean;
import com.atguigu.edu.realtime.common.Constant;
import com.atguigu.edu.realtime.func.DimAsyncFunction;
import com.atguigu.edu.realtime.utils.*;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.shaded.guava18.com.google.common.base.CaseFormat;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.async.ResultFuture;
import org.apache.flink.streaming.api.functions.async.RichAsyncFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import redis.clients.jedis.Jedis;

import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import static org.apache.flink.streaming.api.environment.CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION;

/**
 * DWS job: unions the DWD page / unique-visitor / user-jump traffic topics,
 * aggregates pv / uv / sv / uj counts and total duration per
 * (source id, is_new) key in 10 s event-time tumbling windows, then
 * asynchronously enriches each window result with the dim_base_source
 * dimension (Redis read-through cache in front of Phoenix).
 */
public class Dws_TrafficPageViewWindow {

    // NOTE(review): this was "Dwd_TrafficLogUserJump", copied from the jump job.
    // Reusing another job's id collides with its Kafka consumer group and
    // checkpoint directory, so it is corrected to match this class.
    public static final String ckAndGroupIdAndJobName = "Dws_TrafficPageViewWindow";
    public static final Integer port = 4001;
    // TODO could be supplied via program arguments instead of being hard-coded
    public static final Integer parallelism = 2;

    /**
     * Parses one DWD traffic log record into the shared stats bean with the
     * given metric counters. The caller decides which counter this record
     * contributes to (uv, sv, pv, duration, uj).
     */
    private static TrafficPageViewBean parseBean(JSONObject jsonObject,
                                                 long uvCt, long svCt, long pvCt,
                                                 long durSum, long ujCt) {
        JSONObject commonJsonObj = jsonObject.getJSONObject("common");
        return new TrafficPageViewBean(
                "",                                  // stt - filled after windowing
                "",                                  // edt - filled after windowing
                commonJsonObj.getString("sc"),       // source id
                "",                                  // source name - filled by dim lookup
                commonJsonObj.getString("is_new"),
                uvCt,
                svCt,
                pvCt,
                durSum,
                ujCt,
                jsonObject.getLong("ts")
        );
    }

    public static void main(String[] args) throws Exception {
        /*
            1. Run as the HDFS proxy user so checkpoint storage is writable.
         */
        System.setProperty("HADOOP_USER_NAME", "atguigu");

        /*
            2. Create the stream execution environment.
         */
        // 2.1 environment configuration
        Configuration configuration = new Configuration();
        configuration.setString("pipeline.name", ckAndGroupIdAndJobName);
        configuration.setInteger("rest.port", port);
        // 2.2 build the environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(configuration);
        env.setParallelism(parallelism);    // keep in sync with the Kafka partition count
        // checkpoint settings
        env.setStateBackend(new HashMapStateBackend());
        env.enableCheckpointing(3000);  // enable checkpointing with a 3 s interval
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop162:8020/edu/" + ckAndGroupIdAndJobName);
        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(500);
        env.getCheckpointConfig().setExternalizedCheckpointCleanup(RETAIN_ON_CANCELLATION);

        /*
            3. Read the three DWD Kafka topics.
         */
        KafkaSource<String> pageKafkaSource = FlinkSourceUtil.getKafkaSource(ckAndGroupIdAndJobName, Constant.TOPIC_DWD_TRAFFIC_PAGE);
        KafkaSource<String> uniqueVisitorKafkaSource = FlinkSourceUtil.getKafkaSource(ckAndGroupIdAndJobName, Constant.TOPIC_DWD_TRAFFIC_UNIQUE_VISITOR);
        KafkaSource<String> jumpKafkaSource = FlinkSourceUtil.getKafkaSource(ckAndGroupIdAndJobName, Constant.TOPIC_DWD_TRAFFIC_USER_JUMP);

        DataStreamSource<String> pageTrafficLogDS = env.fromSource(pageKafkaSource, WatermarkStrategy.noWatermarks(), "kafka-source");
        DataStreamSource<String> uniqueVisitorDS = env.fromSource(uniqueVisitorKafkaSource, WatermarkStrategy.noWatermarks(), "kafka-source");
        DataStreamSource<String> userJumpDS = env.fromSource(jumpKafkaSource, WatermarkStrategy.noWatermarks(), "kafka-source");

        /*
            4. Map each JSON string to the shared TrafficPageViewBean,
               each stream contributing a different counter.
         */
        SingleOutputStreamOperator<TrafficPageViewBean> pageDS = pageTrafficLogDS.map(
                new MapFunction<String, TrafficPageViewBean>() {
                    @Override
                    public TrafficPageViewBean map(String jsonStr) throws Exception {
                        JSONObject jsonObject = JSON.parseObject(jsonStr);
                        JSONObject pageJsonObj = jsonObject.getJSONObject("page");

                        // one page record = 1 page view plus its stay duration
                        TrafficPageViewBean pageViewBean =
                                parseBean(jsonObject, 0L, 0L, 1L, pageJsonObj.getLong("during_time"), 0L);

                        // no last_page_id means this record starts a new session
                        if (StringUtils.isEmpty(pageJsonObj.getString("last_page_id"))) {
                            pageViewBean.setSvCt(1L);
                        }
                        return pageViewBean;
                    }
                }
        );

        SingleOutputStreamOperator<TrafficPageViewBean> uvDS = uniqueVisitorDS.map(
                new MapFunction<String, TrafficPageViewBean>() {
                    @Override
                    public TrafficPageViewBean map(String jsonStr) throws Exception {
                        // one unique-visitor record = 1 uv
                        return parseBean(JSON.parseObject(jsonStr), 1L, 0L, 0L, 0L, 0L);
                    }
                }
        );

        SingleOutputStreamOperator<TrafficPageViewBean> ujDS = userJumpDS.map(
                new MapFunction<String, TrafficPageViewBean>() {
                    @Override
                    public TrafficPageViewBean map(String jsonStr) throws Exception {
                        // one jump record = 1 uj
                        return parseBean(JSON.parseObject(jsonStr), 0L, 0L, 0L, 0L, 1L);
                    }
                }
        );

        /*
            5. Union the three streams and assign event-time watermarks
               (3 s bounded out-of-orderness on the record ts).
         */
        SingleOutputStreamOperator<TrafficPageViewBean> unionAndWatermarkDS = pageDS.union(
                uvDS,
                ujDS
        ).assignTimestampsAndWatermarks(
                WatermarkStrategy
                        .<TrafficPageViewBean>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                        .withTimestampAssigner((data, timestamp) -> data.getTs())
        );

        /*
            6. Key by (source id, is_new), open 10 s tumbling event-time
               windows and sum the counters incrementally.
         */
        SingleOutputStreamOperator<TrafficPageViewBean> keyedWindowReduceStream = unionAndWatermarkDS.keyBy(
                new KeySelector<TrafficPageViewBean, Tuple2<String, String>>() {

                    @Override
                    public Tuple2<String, String> getKey(TrafficPageViewBean trafficPageViewBean) throws Exception {
                        return Tuple2.of(
                                trafficPageViewBean.getSourceID(),
                                trafficPageViewBean.getIsNew()
                        );
                    }
                }
        )
                .window(TumblingEventTimeWindows.of(Time.seconds(10)))
                .reduce(
                        new ReduceFunction<TrafficPageViewBean>() {
                            @Override
                            public TrafficPageViewBean reduce(TrafficPageViewBean value1, TrafficPageViewBean value2) throws Exception {
                                // incremental aggregation: accumulate every counter into value1
                                value1.setPvCt(value1.getPvCt() + value2.getPvCt());
                                value1.setDurSum(value1.getDurSum() + value2.getDurSum());
                                value1.setSvCt(value1.getSvCt() + value2.getSvCt());
                                value1.setUvCt(value1.getUvCt() + value2.getUvCt());
                                value1.setUjCt(value1.getUjCt() + value2.getUjCt());
                                return value1;
                            }
                        }
                        ,
                        new ProcessWindowFunction<TrafficPageViewBean, TrafficPageViewBean, Tuple2<String, String>, TimeWindow>() {

                            @Override
                            public void process(Tuple2<String, String> key, Context ctx, Iterable<TrafficPageViewBean> elements, Collector<TrafficPageViewBean> out) throws Exception {
                                // reduce leaves exactly one pre-aggregated bean per window
                                TrafficPageViewBean bean = elements.iterator().next();

                                // attach the window bounds
                                bean.setStt(AtguiguUtil.toDatTime(ctx.window().getStart()));
                                bean.setEdt(AtguiguUtil.toDatTime(ctx.window().getEnd()));

                                // ts now means "when this result was computed"
                                bean.setTs(System.currentTimeMillis());
                                out.collect(bean);
                            }
                        }
                );

        /*
            7. Asynchronously look up the dim_base_source dimension.
               Redis acts as a read-through cache in front of Phoenix: dimension
               rows rarely change, so hitting Phoenix per record is wasteful.
               (Remember to evict the Redis key when the dimension changes.)
         */
        // TODO could be folded into DimAsyncFunction later
        SingleOutputStreamOperator<TrafficPageViewBean> outSingleOutputStreamOperator = AsyncDataStream.unorderedWait(
                keyedWindowReduceStream,
                new RichAsyncFunction<TrafficPageViewBean, TrafficPageViewBean>() {

                    private ThreadPoolExecutor threadPool;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        // the lookup work runs on a shared thread pool
                        threadPool = ThreadPoolUtil.getThreadPool();
                    }

                    @Override
                    public void asyncInvoke(TrafficPageViewBean trafficPageViewBean, ResultFuture<TrafficPageViewBean> resultFuture) throws Exception {
                        threadPool.submit(() -> {
                            Jedis redisClient = null;
                            DruidPooledConnection conn = null;
                            try {
                                redisClient = RedisUtil.getRedisClient();  // one client per worker thread
                                conn = DruidDSUtil.getPhoenixConn();

                                String tableName = "dim_base_source";
                                String key = tableName + ":" + trafficPageViewBean.getSourceID();
                                JSONObject dim = null;

                                // 1. try the Redis cache first
                                String json = redisClient.get(key);
                                if (json != null) {
                                    dim = JSON.parseObject(json);
                                }

                                // 2. cache miss: query Phoenix and write the row back to Redis
                                if (dim == null) {
                                    String sql = "select * from " + tableName + " where id = ?";
                                    try (PreparedStatement ps = conn.prepareStatement(sql)) {
                                        ps.setString(1, trafficPageViewBean.getSourceID());
                                        try (ResultSet resultSet = ps.executeQuery()) {
                                            ResultSetMetaData metaData = resultSet.getMetaData();
                                            if (resultSet.next()) {
                                                dim = new JSONObject();
                                                for (int i = 1; i <= metaData.getColumnCount(); i++) {
                                                    // Phoenix returns UPPER_UNDERSCORE labels; store lowerCamel keys
                                                    String name = CaseFormat.UPPER_UNDERSCORE.to(
                                                            CaseFormat.LOWER_CAMEL, metaData.getColumnLabel(i));
                                                    dim.put(name, resultSet.getObject(i));
                                                }
                                                // cache for one day so later records skip Phoenix
                                                redisClient.setex(key, 24 * 60 * 60, dim.toJSONString());
                                            }
                                        }
                                    }
                                }

                                // TODO apply the dim attributes onto the bean once the
                                // target fields of TrafficPageViewBean are confirmed.

                                // hand the (possibly enriched) record back to Flink;
                                // without this the operator stalls until the async timeout
                                resultFuture.complete(Collections.singletonList(trafficPageViewBean));
                            } catch (Exception e) {
                                // surface the failure to Flink instead of losing it in the pool thread
                                resultFuture.completeExceptionally(e);
                            } finally {
                                if (conn != null) {
                                    try {
                                        conn.close();           // return the connection to the Druid pool
                                    } catch (Exception ignored) {
                                        // best effort: closing must not mask the real outcome
                                    }
                                }
                                if (redisClient != null) {
                                    redisClient.close();        // return the client to the Jedis pool
                                }
                            }
                        });
                    }
                },
                60,
                TimeUnit.SECONDS
        );

        outSingleOutputStreamOperator.print();
        // TODO 8. write the aggregated result to ClickHouse instead of stdout
        // outSingleOutputStreamOperator.addSink(
        //         FlinkSinkUtil.getKafkaSink()
        // );

        // without execute() the pipeline is only defined, never run
        env.execute(ckAndGroupIdAndJobName);
    }
}
