package com.atguigu.realtime.app.dws;

import com.atguigu.realtime.app.BaseTenvApp;
import com.atguigu.realtime.app.function.KeyWordUDTF;
import com.atguigu.realtime.common.ConstantTopic;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @ClassName: DWSKeyWordStatsApp
 * @Description: 搜索关键词主题宽表
 * @Author: kele
 * @Date: 2021/4/27 14:40
 **/

/**
 * DWS-layer job: search-keyword statistics.
 *
 * Pipeline:
 *  1. Declare a Kafka source table over the DWD page log. The "common" and "page"
 *     JSON objects are mapped to MAP&lt;STRING,STRING&gt;; event time is derived from
 *     {@code ts} with a 10-second watermark.
 *  2. Declare a ClickHouse sink table whose (NOT ENFORCED) primary key mirrors the
 *     ClickHouse table's ORDER BY columns.
 *  3. Keep only search-result pages (page_id = 'good_list' with a non-null item),
 *     explode the search phrase into single keywords via the custom IK-analyzer
 *     UDTF, count per keyword in 10-second tumbling windows, and write the
 *     aggregates to ClickHouse.
 */
public class DWSKeyWordStatsApp extends BaseTenvApp {

    public static void main(String[] args) {
        // port 40005, parallelism 2, job/checkpoint name "DWSKeyWordStatsApp"
        new DWSKeyWordStatsApp().init(40005, 2, "DWSKeyWordStatsApp");
    }

    /**
     * Builds and submits the SQL pipeline.
     *
     * @param tenv the table environment prepared by {@link BaseTenvApp#init}
     */
    @Override
    protected void run(StreamTableEnvironment tenv) {

        // 1. Source table over the DWD page log. "page" and "common" arrive as JSON
        //    objects, so they are declared as MAP<STRING,STRING>. Event time comes
        //    from ts (epoch millis) with a 10-second out-of-orderness watermark.
        tenv.executeSql("create table page_view(\n" +
                " common MAP<STRING,STRING>,\n" +
                " page MAP<STRING,STRING>,\n" +
                " ts BIGINT,\n" +
                " rowtime as to_timestamp(from_unixtime(ts/1000,'yyyy-MM-dd HH:mm:ss')),\n" +
                " WATERMARK FOR rowtime as rowtime - interval '10' second\n" +
                " )with(\n" +
                " 'connector' = 'kafka',\n" +
                " 'topic'='" + ConstantTopic.DWD_PAGE + "',\n" +
                " 'properties.bootstrap.servers'='hadoop162:9092,hadoop163:9092,hadoop164:9092',\n" +
                // FIX: group id was copy-pasted from DWSProvinceStatsSqlApp; this job
                // must use its own consumer group so offset bookkeeping is not shared.
                " 'properties.group.id'='DWSKeyWordStatsApp',\n" +
                " 'scan.startup.mode' = 'earliest-offset',\n" +
                " 'format' = 'json'\n" +
                ")");

        // 2. Sink table mapped to ClickHouse. The NOT ENFORCED primary key matches
        //    the ORDER BY columns of the ClickHouse table (ClickHouse collapses/
        //    aggregates rows by those columns).
        tenv.executeSql("create table keyword_stats_2021(" +
                "   stt string," +
                "   edt string," +
                "   keyword string," +
                "   source string," +
                "   ct bigint," +
                "   ts bigint," +
                "   PRIMARY KEY (stt, edt, keyword, source) NOT ENFORCED" +
                ")with(" +
                "   'connector' = 'clickhouse', " +
                "   'url' = 'clickhouse://hadoop162:8123', " +
                "   'database-name' = 'gmall2021', " +
                "   'table-name' = 'keyword_stats_2021'," +
                "   'sink.batch-size' = '1', " +
                "   'sink.flush-interval' = '1000', " +
                "   'sink.max-retries' = '3' " +
                ")"
        );

        // 3. Filter: keep search-result pages only — page_id = 'good_list' and a
        //    non-null item (the search phrase lives in page['item']).
        Table fileTable = tenv.sqlQuery(" select \n" +
                "   page['item'] keywords,\n" +
                "   rowtime\n" +
                " from page_view\n" +
                " where page['item'] is not null and page['page_id'] = 'good_list'");

        // Register the filtered table so it can be referenced by name in SQL.
        tenv.createTemporaryView("ft", fileTable);
        // Register the custom IK-analyzer table function (splits a phrase into words).
        tenv.createTemporaryFunction("ik_analyze", KeyWordUDTF.class);

        // 4. Explode: a UDTF produces a derived table of keywords; lateral-join it
        //    back to the source rows ("on true" keeps every exploded row).
        Table explodeTable = tenv.sqlQuery("select \n" +
                " keyword,\n" +
                " rowtime\n" +
                " from ft\n" +
                " join lateral table(ik_analyze(keywords)) as T(keyword)\n" +
                " on true");

        tenv.createTemporaryView("et", explodeTable);

        // 5. Count each keyword inside 10-second tumbling event-time windows.
        //    FIX: alias the window start as "stt" (was "sst") so the select-list
        //    names line up with the sink table's columns.
        Table table = tenv.sqlQuery("select \n" +
                "   date_format( TUMBLE_START(rowtime,interval '10' second),'yyyy-MM-dd HH:mm:ss' ) stt,\n" +
                "   date_format( TUMBLE_END(rowtime,interval '10' second),'yyyy-MM-dd HH:mm:ss' ) edt,\n" +
                "   keyword,\n" +
                "   'SOURCE' source,\n" +   // NOTE(review): constant source tag — presumably should be a search-source constant; confirm against the ClickHouse schema
                "   count(*) ct,\n" +
                "   unix_timestamp() * 1000 ts\n" +
                "from et\n" +
                "   group by \n" +
                "   TUMBLE(rowtime,interval '10' second),\n" +
                "   keyword");

        // Do NOT call table.execute().print() before the insert — it would consume
        // the pipeline and nothing would reach ClickHouse.
        table.executeInsert("keyword_stats_2021");
    }
}
