package dws.app;

import com.bw.gmall.realtime.common.base.BaseSqlApp;
import com.bw.gmall.realtime.common.bean.KeywordStats;
import com.bw.gmall.realtime.common.constant.Constant;
import com.bw.gmall.realtime.common.util.ClickHouseUtil;
import com.bw.gmall.realtime.common.util.SQLUtil;
import dws.functions.KwSplit;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS job: traffic-source keyword page-view window aggregation.
 *
 * <p>Pipeline: read DWD page logs from Kafka, keep only keyword-search page
 * views, explode each search phrase into individual keywords with a custom
 * table function, count keywords per 10-second tumbling event-time window,
 * and sink the aggregated rows into ClickHouse table {@code kw_2203A}.
 */
public class DwsTrafficSourceKeywordPageViewWindow extends BaseSqlApp {
    public static void main(String[] args) throws Exception {
        // Port 10021, parallelism 4, consumer group / app id from Constant.
        new DwsTrafficSourceKeywordPageViewWindow().start(10021, 4, Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW);
    }

    /**
     * Builds and executes the keyword aggregation pipeline.
     *
     * @param env      the stream execution environment (prepared by {@link BaseSqlApp})
     * @param tableEnv the table environment used for all SQL statements
     * @param groupId  Kafka consumer group id (unused here; the source DDL takes
     *                 the group from the Constant directly)
     */
    @Override
    public void handle(StreamExecutionEnvironment env, StreamTableEnvironment tableEnv, String groupId)  {
        // 1. Source table over the DWD page-log Kafka topic. Event time `et`
        //    is derived from the epoch-millis `ts` column, with a 5-second
        //    watermark delay to tolerate out-of-order events.
        tableEnv.executeSql("create table page_log(" +
                " page map<string, string>, " +
                " ts bigint, " +
                " et as to_timestamp_ltz(ts, 3), " +
                " watermark for et as et - interval '5' second " +
                ")" + SQLUtil.getKafkaSourceSQL(Constant.TOPIC_DWD_TRAFFIC_PAGE,Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW));

        // 2. Keep only search events: the user came from the search or home
        //    page, the item type is 'keyword', and a search phrase is present.
        Table kwTable = tableEnv.sqlQuery("select " +
                "page['item'] kw, " +
                "et " +
                "from page_log " +
                "where ( page['last_page_id'] ='search' " +
                " or page['last_page_id'] ='home' " +
                " )" +
                "and page['item_type']='keyword' " +
                "and page['item'] is not null ");
        tableEnv.createTemporaryView("kw_table", kwTable);

        // 3. Register the custom word-segmentation UDTF and explode each
        //    search phrase into one row per keyword via a lateral join.
        tableEnv.createTemporaryFunction("kw_split", KwSplit.class);

        Table keywordTable = tableEnv.sqlQuery("select " +
                " keyword, " +
                " et " +
                "from kw_table " +
                "join lateral table(kw_split(kw)) on true ");
        tableEnv.createTemporaryView("keyword_table", keywordTable);

        // 4. Count keywords per 10-second tumbling event-time window
        //    (legacy group-window syntax; stt/edt are the formatted window
        //    bounds, ts is the processing timestamp of the aggregation).
        Table result = tableEnv.sqlQuery("SELECT\n" +
                "  date_format(TUMBLE_START(et, INTERVAL '10' second), 'yyyy-MM-dd HH:mm:ss') AS stt,\n" +
                "  date_format(TUMBLE_END(et, INTERVAL '10' second), 'yyyy-MM-dd HH:mm:ss') AS edt,\n" +
                "  keyword,\n" +
                "  count(*) ct,\n" +
                "  UNIX_TIMESTAMP()*1000 ts\n" +
                "FROM keyword_table\n" +
                "GROUP BY\n" +
                "  TUMBLE(et, INTERVAL '10' second),\n" +
                "  keyword");

        // 5. Table -> append-only DataStream -> ClickHouse JDBC sink.
        //    NOTE(review): the placeholder order (keyword,ct,stt,edt,ts) must
        //    match how ClickHouseUtil.sink binds KeywordStats fields to the
        //    prepared statement — verify against that utility.
        DataStream<KeywordStats> keywordStatsDataStream = tableEnv.toAppendStream(result, KeywordStats.class);
        keywordStatsDataStream.addSink(ClickHouseUtil.sink("insert into kw_2203A(keyword,ct,stt,edt,ts) values (?,?,?,?,?)"));

        try {
            env.execute();
        } catch (Exception e) {
            // Propagate the failure instead of swallowing it: the original
            // only printed the stack trace and returned normally, leaving a
            // failed job submission silently ignored. Cause is preserved.
            throw new RuntimeException("Flink job execution failed", e);
        }
    }
}
