package com.bw.dws;

import com.bw.bean.KwEntity;
import com.bw.fun.XT3UDTF;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public class XT3 {

    /**
     * Flink streaming job: consumes dwd page-log events from Kafka, extracts
     * search keywords with a table function, counts them per 1-second tumbling
     * event-time window, and prints the result stream. The ClickHouse sink is
     * wired up but currently disabled.
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        EnvironmentSettings settings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // Kafka source table. Event time `times` is derived from the
        // epoch-millisecond `ts` column; the watermark allows no lateness.
        tableEnv.executeSql("CREATE TABLE page_log (\n" +
                "  `common` Map<String,String>,\n" +
                "  `page` Map<String,String>,\n" +
                "  `ts` BIGINT,\n" +
                "   times AS TO_TIMESTAMP(FROM_UNIXTIME(ts/1000)),\n" +
                "   WATERMARK FOR times AS times - INTERVAL '0' SECOND\n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'dwd_page_log_yk2',\n" +
                "  'properties.bootstrap.servers' = 'hadoop102:9092',\n" +
                "  'properties.group.id' = 'group1',\n" +
                "  'scan.startup.mode' = 'earliest-offset',\n" +
                "  'format' = 'json'\n" +
                ")");

        // Register the keyword-splitting UDTF under the name used in SQL below.
        tableEnv.createTemporarySystemFunction("myudtf", XT3UDTF.class);

        // Keep only search events carrying a keyword item and explode the
        // item string into individual keyword rows: (ts, times, keyword).
        Table keywordTable = tableEnv.sqlQuery(
                "SELECT ts,times, keyword  " +
                        "FROM page_log, LATERAL TABLE(myudtf(page['item'])) " +
                        " where page['last_page_id']='search'" +
                        " and page['item_type']='keyword'" +
                        " and page['item']is not null".replace("]is", "] is"));

        tableEnv.createTemporaryView("t_kw", keywordTable);

        // Per-keyword counts over 1-second tumbling windows; `ts` here is the
        // processing-time stamp (epoch millis) at aggregation time.
        Table aggTable = tableEnv.sqlQuery("select\n" +
                "                DATE_FORMAT(TUMBLE_START(times,INTERVAL '1' SECOND),'yyyy-MM-dd HH:mm:ss') as stt,\n" +
                "                DATE_FORMAT(TUMBLE_END(times,INTERVAL '1' SECOND),'yyyy-MM-dd HH:mm:ss')  as edt,\n" +
                "                'source' as source,\n" +
                "                unix_timestamp()*1000 as ts,\n" +
                "                keyword,\n" +
                "                count(1) cnt\n" +
                "                from t_kw\n" +
                "                group by TUMBLE(times,INTERVAL '1' SECOND),keyword");

        // Target ClickHouse DDL (for reference):
        //   create table kw(
        //     stt String, edt String, source String,
        //     ts Int64, keyword String, cnt Int64
        //   ) engine = ReplacingMergeTree(ts)
        //     order by (stt,edt,keyword,source);
        DataStream<KwEntity> kwStream = tableEnv.toAppendStream(aggTable, KwEntity.class);
        kwStream.print();
        // Enable to persist into ClickHouse:
        // kwStream.addSink(new XT3SinkClickhouse());
        env.execute();

    }
}
