package com.bw.yk07;

import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

public class Flink_yk07_Test1 {
    /**
     * Flink streaming job: reads page-view logs from a Kafka topic, filters out
     * search events, splits the search string into words via a table function,
     * and counts words per 10-second event-time tumbling window.
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Kafka source table. `rowtime` is a computed column — the `AS` keyword is
        // required by Flink DDL (without it the CREATE TABLE fails to parse). It
        // converts the epoch-millisecond `ts` to a TIMESTAMP, and the watermark
        // allows 10 seconds of out-of-orderness for event-time windowing.
        tEnv.executeSql("CREATE TABLE page_log (\n" +
                "  `common` Map<String,String>,\n" +
                "  `page` Map<String,String>,\n" +
                "  `ts` BIGINT,\n" +
                "  `rowtime` AS TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)),\n" +
                "   WATERMARK FOR rowtime AS rowtime - INTERVAL '10' SECOND" +
                " ) WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'base_page_log',\n" +
                "  'properties.bootstrap.servers' = 'hadoop-single:9092',\n" +
                "  'properties.group.id' = 'testGroup11',\n" +
                "  'scan.startup.mode' = 'earliest-offset',\n" +
                "  'format' = 'json'\n" +
                ")");

        // NOTE(review): `.execute().print()` on an unbounded Kafka source blocks
        // forever, so the statements below — including env.execute() — would never
        // run. Keep it commented out; enable only for ad-hoc debugging.
        // tEnv.sqlQuery("select * from page_log").execute().print();

        // Keep only keyword-search page views that actually carry a search term.
        Table table1 = tEnv.sqlQuery("select page['item'] as words,rowtime from page_log" +
                " where page['item_type']='keyword' and page['last_page_id'] is not null and page['item'] is not null");

        tEnv.createTemporaryView("table1", table1);

        // UDTF that splits the full search string into individual `word` rows.
        tEnv.createTemporaryFunction("splitfunc", SplitFunction.class);

        // Explode each search string into one row per word (cross join with the
        // lateral table keeps the originating rowtime on every word row).
        Table table2 = tEnv.sqlQuery(
                " SELECT word,rowtime " +
                        " FROM table1,LATERAL TABLE(splitfunc(words))");

        tEnv.createTemporaryView("table2", table2);

        // Word counts per 10-second tumbling event-time window; window bounds are
        // formatted as strings for the downstream sink.
        Table table3 = tEnv.sqlQuery("select " +
                " DATE_FORMAT(TUMBLE_START(rowtime, INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') AS stt, " +
                " DATE_FORMAT(TUMBLE_END(rowtime, INTERVAL '10' SECOND),'yyyy-MM-dd HH:mm:ss') AS edt, " +
                " 'SEARCH' AS source," +
                " word," +
                " count(*) as word_ct" +
                " from table2" +
                " group by word,TUMBLE(rowtime, INTERVAL '10' SECOND)");

        // A TUMBLE group-by emits one final row per closed window, so the result
        // is append-only and toAppendStream is safe here.
        DataStream<Row> result = tEnv.toAppendStream(table3, Row.class);

//        result.addSink(
//                JdbcSink.sink(
//                        // 5 columns -> 5 placeholders (original had a stray 6th '?')
//                        "insert into yk07_keywords (stt,edt,source,word,word_ct) values (?,?,?,?,?)",
//
//                )
//        );

        env.execute();

    }
}
