package com.chenxu.gmall.realtime.app.dws;

import com.chenxu.gmall.realtime.app.func.KeywordUDTF;
import com.chenxu.gmall.realtime.bean.KeywordStats;
import com.chenxu.gmall.realtime.common.GmallConstant;
import com.chenxu.gmall.realtime.utils.ClickHouseUtil;
import com.chenxu.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Date: 2021/07/20
 * Desc: DWS layer - search keyword statistics
 */

/*
Sample dwd_page_log record. We only care about entries where "item_type":"keyword";
the "item" field (e.g. "图书") then carries the search keyword:
 page:2> {"common":{"ar":"530000","uid":"36","os":"Android 11.0","ch":"wandoujia","is_new":"0",
       "md":"Xiaomi 10 Pro ","mid":"mid_9","vc":"v2.1.134","ba":"Xiaomi"},"page":{"page_id":"good_list","item":"图书",
       "during_time":7183,"item_type":"keyword","last_page_id":"search"},"displays":[{"display_type":"recommend",
       "item":"1","item_type":"sku_id","pos_id":5,"order":1},{"display_type":"recommend","item":"5",
       "item_type":"sku_id","pos_id":2,"order":2},{"display_type":"query","item":"2","item_type":"sku_id","pos_id":2,
       "order":3},{"display_type":"promotion","item":"5","item_type":"sku_id","pos_id":3,"order":4},
       {"display_type":"promotion","item":"9","item_type":"sku_id","pos_id":4,"order":5},{"display_type":"query",
       "item":"9","item_type":"sku_id","pos_id":5,"order":6}],"ts":1626684732000}
 */
public class KeywordStatsApp {
    public static void main(String[] args) throws Exception {
        //TODO 1. Environment setup
        //1.1 Create the Flink streaming execution environment
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 Parallelism (1 for local testing)
        streamEnv.setParallelism(1);
        /*
        //1.3 Checkpoint (CK) settings — disabled for local runs
        env.enableCheckpointing(5000, CheckpointingMode.AT_LEAST_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60000);
        StateBackend fsStateBackend = new FsStateBackend(
                "hdfs://hadoop102:8020/gmall/flink/checkpoint/ProductStatsApp");
        env.setStateBackend(fsStateBackend);
        System.setProperty("HADOOP_USER_NAME","atguigu");
        */
        //1.4 Create the Table environment on top of the streaming one
        EnvironmentSettings tableSettings = EnvironmentSettings.newInstance().inStreamingMode().build();
        StreamTableEnvironment tblEnv = StreamTableEnvironment.create(streamEnv, tableSettings);

        //TODO 2. Register the user-defined table function (IK word segmentation)
        tblEnv.createTemporarySystemFunction("ik_analyze", KeywordUDTF.class);

        //TODO 3. Create the dynamic source table
        //3.1 Kafka topic and consumer group
        String pageLogTopic = "dwd_page_log";
        String consumerGroup = "keywordstats_app_group";
        //3.2 DDL for the Kafka-backed table.
        //The "common" and "page" JSON objects are mapped as MAP<STRING, STRING>, so
        //individual attributes are read later via page['item'] etc.
        //TO_TIMESTAMP(string1[, string2]) parses a string (default yyyy-MM-dd HH:mm:ss) into a timestamp;
        //FROM_UNIXTIME(numeric[, string]) renders an epoch-seconds value as a formatted string.
        //A 2-second bounded-out-of-orderness watermark is declared on the derived rowtime column.
        tblEnv.executeSql(
            "CREATE TABLE page_view (" +
                " common MAP<STRING, STRING>," +
                " page MAP<STRING, STRING>," +
                " ts BIGINT," +
                " rowtime as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000,'yyyy-MM-dd HH:mm:ss'))," +
                " WATERMARK FOR rowtime AS rowtime - INTERVAL '2' SECOND) " +
                " WITH (" + MyKafkaUtil.getKafkaDDL(pageLogTopic, consumerGroup) + ")"
        );

        //TODO 4. Query the dynamic table:
        //keep only the item-list pages that actually carry a search phrase
        Table fullWordTable = tblEnv.sqlQuery(
            "select page['item'] fullword,rowtime " +
                " from page_view " +
                " where page['page_id']='good_list' and page['item'] IS NOT NULL"
        );
        /*
        Regular join in SQL:
        SELECT *
        FROM Orders
        INNER JOIN Product
        ON Orders.product_id = Product.id

        Table-function (UDTF) join — LATERAL TABLE correlates each row of
        Orders with the rows produced by the table function:
        SELECT order_id, res
        FROM Orders,
        LATERAL TABLE(table_func(order_id)) t(res)
         */
        //TODO 5. Split each full search phrase into keywords with the UDTF
        Table splitWordTable = tblEnv.sqlQuery(
            "SELECT keyword, rowtime " +
                "FROM  " + fullWordTable + "," +
                "LATERAL TABLE(ik_analyze(fullword)) AS t(keyword)"
        );

        //TODO 6. Group by keyword over 10-second tumbling event-time windows and count
        Table aggTable = tblEnv.sqlQuery(
            "select keyword,count(*) ct,  '" + GmallConstant.KEYWORD_SEARCH + "' source," +
                "DATE_FORMAT(TUMBLE_START(rowtime, INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') stt," +
                "DATE_FORMAT(TUMBLE_END(rowtime, INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') edt ," +
                "UNIX_TIMESTAMP()*1000 ts from " + splitWordTable +
                " group by TUMBLE(rowtime, INTERVAL '10' SECOND),keyword"
        );

        //TODO 7. Convert the result table back into an append-only DataStream of POJOs
        DataStream<KeywordStats> statsStream = tblEnv.toAppendStream(aggTable, KeywordStats.class);

        statsStream.print(">>>>");

        //TODO 8. Sink to ClickHouse.
        //The insert column list is given explicitly so the bean fields and the
        //table columns are matched by name even if their order differs.
        statsStream.addSink(
            ClickHouseUtil.getJdbcSink("insert into keyword_stats_0709(keyword,ct,source,stt,edt,ts) values(?,?,?,?,?,?)")
        );

        //End-to-end test procedure:
        // 1. Start ZK, Kafka, logger.sh, and ClickHouse
        // 2. Run BaseLogApp
        // 3. Run KeywordStatsApp
        // 4. Run the jar under rt_applog and watch the console output
        // 5. Check the keyword_stats_0709 table in ClickHouse
        //Allow some extra time for windows to fire.
        //Expected console output looks like:
        // >>>>:4> KeywordStats(keyword=盒子, ct=2, source=SEARCH, stt=2021-07-19 12:58:30, edt=2021-07-19 12:58:40, ts=1626757129000)
        streamEnv.execute();
    }
}
