package com.atguigu.gmall.realtime.app.dws;

import com.atguigu.gmall.realtime.app.func.KeywordUDTF;
import com.atguigu.gmall.realtime.common.GmallConfig;
import com.atguigu.gmall.realtime.utils.KafkaUtil;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Search-keyword aggregation window (DWS layer).
 *
 * <p>Reads page logs from the {@code dwd_traffic_page_log} Kafka topic, keeps
 * only search behaviour, tokenizes the search phrase with the {@code ik_analyze}
 * UDTF, counts keywords per 10-second tumbling event-time window, and writes
 * the result to the Doris table
 * {@code dws_traffic_source_keyword_page_view_window}.
 *
 * <p>Processes that must be running:
 *      zk, kafka, flume, DwdTrafficBaseLogSplit, DwsTrafficSourceKeywordPageViewWindow
 *
 * @author Felix
 * @date 2023/8/7
 */
public class DwsTrafficSourceKeywordPageViewWindow {
    public static void main(String[] args) {
        //TODO 1. Basic environment setup
        //1.1 Stream execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        //1.2 Parallelism
        env.setParallelism(4);
        //1.3 Table environment on top of the stream environment
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        //1.4 Register the keyword tokenizer UDTF so the SQL below can call it
        tableEnv.createTemporarySystemFunction("ik_analyze", KeywordUDTF.class);

        //TODO 2. Checkpointing (NOTE: checkpointing MUST be enabled when sinking to Doris)
        env.enableCheckpointing(5000L);
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 3000L));

        //TODO 3. Create a dynamic table over the page-log topic; the event-time
        // attribute row_time is derived from the epoch-millisecond ts field.
        // NOTE(review): the watermark equals row_time, i.e. zero tolerance for
        // out-of-order events — late records are dropped from the windows.
        // Consider `row_time - INTERVAL 'x' SECOND` if the source is unordered.
        String topic = "dwd_traffic_page_log";
        String groupId = "dws_traffic_keyword_group";
        tableEnv.executeSql("CREATE TABLE page_log (\n" +
            "  common map<string,string>,\n" +
            "  page map<string,string>,\n" +
            "  ts bigint ,\n" +
            "  row_time as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)),\n" +
            "  WATERMARK FOR row_time AS row_time\n" +
            ")" + KafkaUtil.getKafkaDDL(topic, groupId));

        //TODO 4. Keep only search behaviour.
        // FIX: also require page['item'] is not null — without this guard a
        // NULL search phrase would be passed to the ik_analyze UDTF in step 5.
        Table searchTable = tableEnv.sqlQuery("select\n" +
            "\tpage['item'] fullword,\n" +
            "\trow_time\n" +
            "from page_log where page['last_page_id']='search' " +
            "and page['item_type'] = 'keyword' and page['item'] is not null ");
        tableEnv.createTemporaryView("search_table", searchTable);

        //TODO 5. Tokenize: the lateral join pairs every keyword emitted by the
        // UDTF with the originating row's event time.
        Table splitTable = tableEnv.sqlQuery("SELECT keyword,row_time\n" +
            "FROM search_table,LATERAL TABLE(ik_analyze(fullword)) t(keyword)");
        tableEnv.createTemporaryView("split_table", splitTable);

        //TODO 6. Group by keyword, count per 10-second tumbling window; window
        // bounds are formatted as stt/edt strings and a yyyyMMdd partition date.
        Table reduceTable = tableEnv.sqlQuery("select\n" +
            " DATE_FORMAT(TUMBLE_START(row_time, INTERVAL '10' second), 'yyyy-MM-dd HH:mm:ss') stt,\n" +
            " DATE_FORMAT(TUMBLE_END(row_time, INTERVAL '10' second), 'yyyy-MM-dd HH:mm:ss') edt,\n" +
            " date_format(TUMBLE_START(row_time, INTERVAL '10' SECOND), 'yyyyMMdd') cur_date,\n" +
            " keyword,\n" +
            " count(*) keyword_count\n" +
            "from split_table group by keyword,TUMBLE(row_time, INTERVAL '10' second)");
        tableEnv.createTemporaryView("reduce_table", reduceTable);

        //TODO 7. Write the aggregated result to Doris
        //7.1 Dynamic table mapped onto the target Doris table
        // NOTE(review): credentials are hardcoded — move username/password to
        // GmallConfig or external configuration before production use.
        // NOTE(review): 'sink.buffer-size' = '4086' looks like a typo for 4096 — confirm.
        tableEnv.executeSql("CREATE table doris_t(  " +
            " stt string, " +
            " edt string, " +
            " cur_date string, " +
            " keyword string, " +
            " keyword_count bigint " +
            ")WITH (" +
            "  'connector' = 'doris', " +
            "  'fenodes' = '"+ GmallConfig.DORIS_FE +"', " +
            "  'table.identifier' = '"+GmallConfig.DORIS_DB+".dws_traffic_source_keyword_page_view_window', " +
            "  'username' = 'root', " +
            "  'password' = 'aaaaaa', " +
            "  'sink.properties.format' = 'json', " +
            "  'sink.properties.read_json_by_line' = 'true', " +
            "  'sink.buffer-count' = '4', " +
            "  'sink.buffer-size' = '4086'," +
            "  'sink.enable-2pc' = 'false' " + // two-phase commit disabled to simplify testing
            ")  ");

        //7.2 Submit the insert job (executeSql submits asynchronously; no env.execute() needed)
        tableEnv.executeSql("insert into doris_t select * from reduce_table");

    }
}
