package com.bw.gmall.realtime.dws.app;

import com.bw.gmall.realtime.common.base.BaseSqlApp;
import com.bw.gmall.realtime.common.constant.Constant;
import com.bw.gmall.realtime.common.util.SQLUtil;
import com.bw.gmall.realtime.dws.function.KwSplit;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public class DwsTrafficSourceKeywordPageViewWindow extends BaseSqlApp {
    public static void main(String[] args) {
        new DwsTrafficSourceKeywordPageViewWindow()
                .start(Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW, 1, 10021);
    }

    /**
     * Counts search keywords per 5-second tumbling event-time window and writes the
     * aggregates to the Doris table {@code dws_traffic_source_keyword_page_view_window}.
     *
     * <p>Pipeline: Kafka page log &rarr; filter keyword searches &rarr; UDTF split of the
     * search phrase into single keywords &rarr; tumbling-window count &rarr; Doris sink.
     *
     * @param env     streaming execution environment (checkpointing must be enabled by the
     *                base class — the Doris sink relies on it)
     * @param tEnv    table environment used for all SQL statements
     * @param groupId Kafka consumer-group id for the source topic
     */
    @Override
    public void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv, String groupId) {
        // 1. Source: DWD page-log topic, event time derived from ts with a 5s
        //    out-of-orderness watermark.
        //    BUG FIX: the second argument of getKafkaSourceSQL is the consumer-group id;
        //    the original passed the topic name twice and left the groupId parameter
        //    unused, so every run consumed under one topic-named group.
        tEnv.executeSql("create table page_log(\n" +
                "  page map<String,String>,\n" +
                "  common map<String,String>,\n" +
                "  ts bigint,\n" +
                " et as to_timestamp_ltz(ts, 3), " +
                " watermark for et as et - interval '5' second " +
                ")" + SQLUtil.getKafkaSourceSQL(Constant.TOPIC_DWD_TRAFFIC_PAGE, groupId));

        // 2. Keep only records produced by a keyword search.
        Table searched = tEnv.sqlQuery("select \n" +
                "`page`['item'] item,\n" +
                " et\n" +
                " from page_log\n" +
                "where `page`['last_page_id'] = 'search'\n" +
                "and `page`['item_type'] = 'keyword'");
        tEnv.createTemporaryView("word", searched);

        // 3. Explode each search phrase into single keywords via the KwSplit UDTF.
        tEnv.createTemporarySystemFunction("SplitFunction", KwSplit.class);
        Table words = tEnv.sqlQuery("select \n" +
                " item,\n" +
                " word,\n" +
                " et\n" +
                " from word,lateral table(SplitFunction(item))");
        tEnv.createTemporaryView("words", words);

        // 4. 5-second tumbling window count per keyword (windowing-TVF syntax).
        //    NOTE: the legacy group-window (TUMBLE(...)) variant that used to live here
        //    was dead code — its result was never read — and has been removed.
        Table result = tEnv.sqlQuery("select \n" +
                "date_format(window_start,'yyyy-MM-dd HH:mm:ss') stt," +
                "date_format(window_end,'yyyy-MM-dd HH:mm:ss')edt," +
                "date_format(now(),'yyyyMMdd') cur_date," +
                "word keyword," +
                "count(*) keyword_count " +
                "from table( tumble(table words ,descriptor(et),interval '5' second))" +
                "group by window_start,window_end,word");

        // 5. Sink DDL for Doris; connector options come from the shared helper.
        tEnv.executeSql("create table dws_traffic_source_keyword_page_view_window (" +
                " stt STRING," +
                " edt STRING," +
                " cur_date STRING," +
                " keyword STRING," +
                " keyword_count BIGINT" +
                ")" + SQLUtil.getDorisSinkSQL(Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW));

        // Writing to Doris requires checkpointing to be enabled.
        result.insertInto(Constant.DWS_TRAFFIC_SOURCE_KEYWORD_PAGE_VIEW_WINDOW).execute();
    }
}