package com.atguigu.realtime.app.dws;

import com.atguigu.realtime.app.BaseSqlApp;
import com.atguigu.realtime.bean.KeywordStats;
import com.atguigu.realtime.function.IkAnalyzer;
import com.atguigu.realtime.util.FlinkSinkUtil;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.ZoneOffset;

import static com.atguigu.realtime.common.Constant.*;


/**
 * DWS-layer job: counts search keywords over 5-second tumbling event-time
 * windows and writes the aggregates to ClickHouse.
 *
 * <p>Pipeline: Kafka {@code dwd_page} topic -> filter keyword searches on the
 * goods-list page -> split each search phrase into keywords with the custom
 * IK-analyzer table function -> tumbling-window count per keyword ->
 * ClickHouse table {@code keyword_stats_2021}.
 */
public class DwsSearchKeywordStatsApp extends BaseSqlApp {

    public static void main(String[] args) {
        // Port 4004, parallelism 1; the job name matches the Kafka consumer group id.
        new DwsSearchKeywordStatsApp().init(4004, 1, "DwsSearchKeywordStatsApp");
    }

    @Override
    protected void run(StreamTableEnvironment tEnv) {
        // NOTE(review): to_timestamp_ltz and date_format output depend on the
        // session time zone. If the cluster default is not UTC+8, re-enable:
        // tEnv.getConfig().setLocalTimeZone(ZoneOffset.ofHours(8));

        createPageSourceTable(tEnv);
        registerSearchKeywordView(tEnv);  // view "t1": raw search phrases
        registerSplitKeywordView(tEnv);   // view "t2": one row per keyword
        Table result = aggregateKeywordCounts(tEnv);
        sinkToClickHouse(tEnv, result);
    }

    /** 1. Dynamic table over the Kafka dwd_page topic; event time = ts with a 3s watermark. */
    private void createPageSourceTable(StreamTableEnvironment tEnv) {
        tEnv.executeSql("create table dwd_page(" +
                " page map<string,string>, " +
                " ts bigint, " +
                " et as to_timestamp_ltz(ts,3)," +
                " watermark for et as et - interval '3' second " +
                ")with(" +
                " 'connector'='kafka'," +
                " 'properties.bootstrap.servers'='" + KAFKA_BROKERS + "'," +
                " 'properties.group.id'='DwsSearchKeywordStatsApp'," +
                " 'topic'='" + TOPIC_DWD_PAGE + "'," +
                " 'format'='json'," +
                " 'scan.startup.mode'='earliest-offset' " +
                ")");
    }

    /** 2. Keep only keyword searches on the goods-list page; expose the phrase as {@code kw}. */
    private void registerSearchKeywordView(StreamTableEnvironment tEnv) {
        Table t1 = tEnv.sqlQuery("select " +
                " page['item'] kw, " +
                " et " +
                " from dwd_page " +
                " where page['page_id'] = 'good_list' " +
                " and page['item_type'] = 'keyword' " +
                " and page['item'] is not null ");
        tEnv.createTemporaryView("t1", t1);
    }

    /** 3. Split each search phrase into keywords via the custom {@code ik_analyzer} UDTF. */
    private void registerSplitKeywordView(StreamTableEnvironment tEnv) {
        tEnv.createTemporaryFunction("ik_analyzer", IkAnalyzer.class);
        Table t2 = tEnv.sqlQuery("select " +
                " keyword, " +
                " et " +
                " from t1 " +
                " join lateral table(ik_analyzer(kw)) on true ");
        tEnv.createTemporaryView("t2", t2);
    }

    /**
     * 4. Count each keyword per 5-second tumbling window.
     * {@code 'search'} is a constant tag for the stats source column.
     */
    private Table aggregateKeywordCounts(StreamTableEnvironment tEnv) {
        return tEnv.sqlQuery("select " +
                " date_format(tumble_start(et,interval '5' second),'yyyy-MM-dd HH:mm:ss') stt," +
                " date_format(tumble_end(et,interval '5' second),'yyyy-MM-dd HH:mm:ss') edt," +
                " keyword," +
                " 'search' source, " +
                " count(*) ct," +
                " unix_timestamp() * 1000 ts " +
                " from t2 " +
                " group by " +
                " tumble(et,interval '5' second ), " +
                " keyword ");
    }

    /** 5. Convert to a retract stream, keep only accumulate records, and sink to ClickHouse. */
    private void sinkToClickHouse(StreamTableEnvironment tEnv, Table result) {
        tEnv.toRetractStream(result, KeywordStats.class)
                .filter(t -> t.f0)  // f0 == true marks an insert/accumulate message
                .map(t -> t.f1)
                .addSink(FlinkSinkUtil.getClickHouseSink("gmall2021", "keyword_stats_2021", KeywordStats.class));
    }
}
