package realtime.app.dws;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import realtime.app.func.KeywordUDTF;
import realtime.common.EduConfig;
import realtime.util.EnvUtil;
import realtime.util.KafkaUtil;

/**
 * DWS job: counts search keywords per 10-second tumbling event-time window.
 *
 * <p>Pipeline: Kafka (dwd_traffic_page_log) -> filter keyword searches ->
 * split the full search phrase into keywords with the {@code ik_analyze} UDTF ->
 * tumbling-window count per keyword -> Doris sink
 * ({@code dws_traffic_source_keyword_page_view_window}).
 */
public class DwsTrafficSourceKeywordPageViewWindow {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = EnvUtil.getSEE(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // Register the user-defined keyword-tokenizer table function (IK analyzer).
        tableEnv.createTemporarySystemFunction("ik_analyze", KeywordUDTF.class);

        String topic = "dwd_traffic_page_log";
        String groupId = "dws_traffic_keyword_group";
        // Kafka source table. row_time is event time derived from the epoch-millis
        // ts field; the watermark declaration enables event-time windowing below.
        tableEnv.executeSql("create table page_log(" +
                " common map<string,string>," +
                " page map<string,string>," +
                " ts bigint," +
                " row_time as TO_TIMESTAMP(FROM_UNIXTIME(ts/1000))," +
                " watermark for row_time as row_time" +
                " )" + KafkaUtil.getKafkaDDL(topic,groupId));

        // Keep only keyword-search page events that actually carry a search phrase.
        Table searchTable = tableEnv.sqlQuery("select\n" +
                "    page['item'] fullword,\n" +
                "    row_time\n" +
                " from page_log where page['item_type']='keyword' \n" +
                " and page['item'] is not null");
        tableEnv.createTemporaryView("search_table",searchTable);

        // Explode each full search phrase into individual keywords via the UDTF.
        Table splitTable = tableEnv.sqlQuery("select " +
                " keyword," +
                " row_time" +
                " from search_table,lateral table(ik_analyze(fullword)) t(keyword)");
        tableEnv.createTemporaryView("split_table",splitTable);

        // 10-second tumbling event-time window, counting occurrences per keyword.
        // cur_date is derived from the window start (event time) so results are
        // deterministic and correct on replay/backfill; the previous
        // current_row_timestamp() version used processing time, which would stamp
        // historical windows with today's date.
        Table result = tableEnv.sqlQuery("select\n" +
                "    DATE_FORMAT(TUMBLE_START(row_time, INTERVAL '10' second), 'yyyy-MM-dd HH:mm:ss') stt,\n" +
                "    DATE_FORMAT(TUMBLE_END(row_time, INTERVAL '10' second), 'yyyy-MM-dd HH:mm:ss') edt, \n" +
                "    keyword,\n" +
                "    DATE_FORMAT(TUMBLE_START(row_time, INTERVAL '10' second), 'yyyy-MM-dd') cur_date,\n" +
                "    count(*) keyword_count\n" +
                "from split_table group by keyword,TUMBLE(row_time, INTERVAL '10' second)");
        tableEnv.createTemporaryView("result",result);

        // Doris sink table. Column order must match the SELECT above, since the
        // insert below uses `select *`.
        // NOTE(review): credentials are hardcoded (root/aaaaaa) — move them into
        // EduConfig or an external secret before production use.
        tableEnv.executeSql("CREATE table doris_t(  " +
                " stt string, " +
                " edt string, " +
                " keyword string, " +
                " cur_date string, " +
                " keyword_count bigint " +
                ")WITH (" +
                "  'connector' = 'doris', " +
                "  'fenodes' = '"+ EduConfig.DORIS_FE +"', " +
                "  'table.identifier' = '"+EduConfig.DORIS_DB+".dws_traffic_source_keyword_page_view_window', " +
                "  'username' = 'root', " +
                "  'password' = 'aaaaaa', " +
                "  'sink.properties.format' = 'json', " +
                "  'sink.properties.read_json_by_line' = 'true', " +
                "  'sink.buffer-count' = '4', " +
                "  'sink.buffer-size' = '4086'," + // NOTE(review): 4086 looks like a typo for 4096 — confirm intended value
                "  'sink.enable-2pc' = 'false' " + // 2-phase commit disabled to simplify testing; enable for exactly-once in production
                ")  ");
        // `result` is a reserved word, hence the backticks. executeSql on an
        // INSERT submits the job, so no explicit env.execute() is needed.
        tableEnv.executeSql("insert into doris_t select * from `result`");
    }
}
