package com.bw.dws;

import com.bw.ClickHouseUtil;
import com.bw.bean.KeywordStats;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Flink streaming job: keyword statistics for search traffic.
 *
 * <p>Pipeline: Kafka page-log topic → filter search events → explode the search
 * phrase into individual keywords via the {@code kw_split} table function →
 * 10-second tumbling-window count per keyword → print + ClickHouse sink.
 */
public class Test3 {

    /** DDL for the Kafka-backed page-log source table (event time = ts, 5s watermark). */
    private static final String PAGE_LOG_DDL = "\n" +
            "CREATE TABLE page_log (\n" +
            "  `common` Map<STRING,STRING>,\n" +
            "  `page` Map<STRING,STRING>,\n" +
            "  `ts` Bigint ,\n" +
            "   et as to_timestamp_ltz(ts, 3) ,\n" +
            "   watermark for et as et - interval '5' second \n" +
            ") WITH (\n" +
            "  'connector' = 'kafka',\n" +
            "  'topic' = 'dwd_page_yk7',\n" +
            "  'properties.bootstrap.servers' = 'hadoop102:9092',\n" +
            "  'properties.group.id' = 'test13',\n" +
            "  'scan.startup.mode' = 'earliest-offset',\n" +
            "  'format' = 'json'\n" +
            ")";

    /** Keeps only keyword-search events and projects the search phrase + event time. */
    private static final String SEARCH_KEYWORD_SQL = "select \n" +
            "    page['item'] kw ,\n" +
            "    et \n" +
            "from page_log\n" +
            "where page['last_page_id'] = 'search' \n" +
            "and page['item_type'] = 'keyword' \n" +
            "and page['item'] is not null";

    /** Explodes each search phrase into one row per keyword via the kw_split UDTF. */
    private static final String EXPLODE_SQL = "SELECT kw, keyword,et\n" +
            "FROM kw_table \n" +
            "LEFT JOIN LATERAL TABLE(kw_split(kw)) ON TRUE";

    /**
     * 10-second tumbling-window count per keyword. UNIX_TIMESTAMP()*1000 stamps
     * each emitted row with the wall-clock emission time in milliseconds.
     */
    private static final String WINDOW_AGG_SQL = "SELECT\n" +
            "  date_format(TUMBLE_START(et, INTERVAL '10' second), 'yyyy-MM-dd HH:mm:ss') AS stt,\n" +
            "  date_format(TUMBLE_END(et, INTERVAL '10' second), 'yyyy-MM-dd HH:mm:ss') AS edt,\n" +
            "  keyword,\n" +
            "  count(*) ct,\n" +
            "  UNIX_TIMESTAMP()*1000 ts\n" +
            "FROM ex_table\n" +
            "GROUP BY\n" +
            "  TUMBLE(et, INTERVAL '10' second),\n" +
            "  keyword";

    // NOTE(review): column order here differs from the SELECT order (stt, edt,
    // keyword, ct, ts). Assumes ClickHouseUtil.sink maps bean fields to the
    // `?` placeholders by name/position it derives itself — verify against
    // ClickHouseUtil and KeywordStats before relying on it.
    private static final String CLICKHOUSE_INSERT_SQL =
            "insert into kw_2203A(keyword,ct,stt,edt,ts) values(?,?,?,?,?)";

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism can also be set per-operator, in the config file, or via
        // job parameters; a single job-wide value of 1 is used here.
        env.setParallelism(1);

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // 1. Register the Kafka source table for page-log events.
        tEnv.executeSql(PAGE_LOG_DDL);

        // 2. Filter down to keyword-search events.
        Table searchKeywords = tEnv.sqlQuery(SEARCH_KEYWORD_SQL);
        tEnv.createTemporaryView("kw_table", searchKeywords);

        // 3. Register the keyword-splitting table function.
        tEnv.createTemporaryFunction("kw_split", KwSplit.class);

        // 4. Explode each search phrase into individual keywords.
        Table exploded = tEnv.sqlQuery(EXPLODE_SQL);
        tEnv.createTemporaryView("ex_table", exploded);

        // 5. Tumbling-window aggregation per keyword.
        Table aggregated = tEnv.sqlQuery(WINDOW_AGG_SQL);

        // 6. Convert to a POJO stream and sink. The window aggregation is
        // append-only, so toAppendStream is valid here. Assumes KeywordStats
        // field names match the SELECT aliases (stt, edt, keyword, ct, ts) —
        // TODO confirm against the bean definition.
        DataStream<KeywordStats> statsStream = tEnv.toAppendStream(aggregated, KeywordStats.class);
        statsStream.print();
        statsStream.addSink(ClickHouseUtil.sink(CLICKHOUSE_INSERT_SQL));

        env.execute();
    }
}
