package groupOne.app.DWS;

import groupOne.app.BaseAppSql;
import groupOne.common.Constant;
import groupOne.util.IkAnalyzer;
import groupOne.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS job: traffic-domain, source/keyword granularity, page-view window summary.
 *
 * <p>Pipeline: read DWD page-log events from Kafka, keep records that carry a
 * search keyword, split each keyword into tokens with the IK-analyzer table
 * function, count tokens per 5-second tumbling event-time window, and write
 * the windowed counts to a Doris aggregate table.
 */
public class DwsTrafficSourceKeywordPageViewWindow extends BaseAppSql {
    public static void main(String[] args) {
        // Port 5012, parallelism 2, checkpoint/consumer-group identifier.
        new DwsTrafficSourceKeywordPageViewWindow()
                .init(5012, 2, "DwsTrafficSourceKeywordPageViewWindow");
    }

    @Override
    protected void handle(StreamExecutionEnvironment env,
                          StreamTableEnvironment tEnv) {
        // Register the IK-analyzer UDTF (a table function) up front; it is
        // used in step 3 to explode a keyword string into individual tokens.
        tEnv.createTemporaryFunction("ik_analyzer", IkAnalyzer.class);

        // 1. Map the DWD page-log Kafka topic onto a dynamic table, deriving
        //    an event-time attribute from `ts` with a 3-second watermark.
        tEnv.executeSql("create table page(" +
                " page map<string, string>, " +
                " ts bigint, " +
                " et as to_timestamp_ltz(ts, 3), " +
                " watermark for et as et - interval '3' second " +
                ")"
                + SQLUtil.getKafkaSource(Constant.TOPIC_DWD_TRAFFIC_PAGE, "Dws_01_DwsTrafficSourceKeywordPageViewWindow"));

        // 2. Keep only the rows that carry a search keyword.
        //    NOTE(review): the filter assumes keyword pages have
        //    last_page_id = 'search' and item_type = 'keyword' — confirm
        //    against the upstream log schema.
        Table searchKeywords = tEnv.sqlQuery("select " +
                " page['item'] keyword, " +
                " et " +
                "from page " +
                "where page['last_page_id']= 'search' " +
                "and page['item_type']='keyword' " +
                "and page['item'] is not null");
        tEnv.createTemporaryView("keyword_table", searchKeywords);

        // 3. Tokenize: a lateral join against the UDTF emits one row per
        //    token (column `kw`), carrying the original event time along.
        Table tokens = tEnv.sqlQuery("select" +
                " kw, " +
                " et " +
                "from keyword_table " +
                "join lateral table(ik_analyzer(keyword)) on true");
        tEnv.createTemporaryView("kw_table", tokens);

        // 4. Count each token per 5-second tumbling window (TUMBLE TVF).
        Table windowCounts = tEnv.sqlQuery("select " +
                " date_format(window_start, 'yyyy-MM-dd HH:mm:ss') stt, " +
                " date_format(window_end, 'yyyy-MM-dd HH:mm:ss') edt, " +
                " 'search' source, " +
                " kw keyword,  " +
                " date_format(window_start, 'yyyy-MM-dd') cur_date, " +  // statistics date (window start day)
                " count(*) keyword_count " +
                "from table( tumble( table kw_table, descriptor(et), interval '5' second ) ) " +
                "group by window_start, window_end, kw");

        // 5. Declare the Doris sink and write the windowed counts into it.
        tEnv.executeSql("create table kw(" +
                " stt string, " +
                " edt string, " +
                " source string, " +
                " keyword string, " +
                " cur_date string, " +
                " keyword_count bigint " +
                ")with(" +
                "  'connector' = 'doris', " +
                "  'fenodes' = 'hadoop162:7030', " +
                "  'table.identifier' = 'gmall2022.dws_traffic_source_keyword_page_view_window', " +
                "  'username' = 'root', " +
                "  'password' = 'aaaaaa' " +
                ")");

        windowCounts.executeInsert("kw");

// Reference: DDL for the target Doris table (run once in Doris, kept verbatim).
/*
drop table if exists dws_traffic_source_keyword_page_view_window;
create table if not exists dws_traffic_source_keyword_page_view_window
(
    `stt`           DATETIME comment '窗口起始时间',
    `edt`           DATETIME comment '窗口结束时间',
    `source`        VARCHAR(10) comment '关键词来源',
    `keyword`       VARCHAR(10) comment '关键词',
    `cur_date`      DATE comment '当天日期',
    `keyword_count` BIGINT replace comment '关键词评分'
) engine = olap aggregate key (`stt`, `edt`, `source`, `keyword`, `cur_date`)
comment "流量域来源-关键词粒度页面浏览汇总表"
partition by range(`cur_date`)()
distributed by hash(`keyword`) buckets 10 properties (
  "replication_num" = "3",
  "dynamic_partition.enable" = "true",
  "dynamic_partition.time_unit" = "DAY",
  "dynamic_partition.start" = "-1",
  "dynamic_partition.end" = "1",
  "dynamic_partition.prefix" = "par",
  "dynamic_partition.buckets" = "10",
  "dynamic_partition.hot_partition_num" = "1"
);
*/

    }
}
