package cn.kgc.gmall.app.dws;

import cn.kgc.gmall.app.func.KeywordFunction;
import cn.kgc.gmall.bean.KeywordStats;
import cn.kgc.gmall.common.GmallConstant;
import cn.kgc.gmall.utils.ClickhouseUtil;
import cn.kgc.gmall.utils.MyKafkaUtils;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;

/**
 * Keyword search statistics application (DWS layer).
 *
 * <p>Reads page-view log events from the Kafka topic {@code dwd_topic_page},
 * filters for search-result pages, splits the full search phrase into
 * individual keywords with the {@code ik_analyze} UDTF, counts keyword
 * occurrences per 10-second tumbling event-time window, and writes the
 * aggregates to the ClickHouse table {@code keyword_stats_2022}.
 */
public class KeywordStatsApp {
    public static void main(String[] args) throws Exception {

        //TODO 0. Prepare the basic streaming environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Set parallelism to match the number of Kafka partitions (presumably 4 — verify against topic config)
        env.setParallelism(4);
        /*
        // Checkpoint settings (disabled for local development; enable in production)
        env.enableCheckpointing(5000, CheckpointingMode.AT_LEAST_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60000);
        StateBackend fsStateBackend = new FsStateBackend(
                "hdfs://hadoop202:8020/gmall/flink/checkpoint/ProvinceStatsSqlApp");
        env.setStateBackend(fsStateBackend);
        System.setProperty("HADOOP_USER_NAME","atkgc");
        */
        //TODO 1. Create the Table environment in streaming mode
        EnvironmentSettings settings = EnvironmentSettings
                .newInstance()
                .inStreamingMode()
                .build();

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);

        // 1. Register the UDTF that tokenizes a search phrase into keywords
        tableEnv.createTemporarySystemFunction("ik_analyze", KeywordFunction.class);

        // Kafka consumer group and source topic
        String groupId = "keyword_stats_app";
        String topic = "dwd_topic_page";
        // 2. Define the source table over Kafka.
        // FROM_UNIXTIME expects seconds while ts is in milliseconds, hence ts/1000;
        // the derived rowtime column carries a 2-second watermark for event time.
        tableEnv.executeSql("CREATE TABLE page_view " +
                "(common MAP<STRING,STRING>, " +
                "page MAP<STRING,STRING>,ts BIGINT, " +
                "rowtime AS TO_TIMESTAMP(FROM_UNIXTIME(ts/1000, 'yyyy-MM-dd HH:mm:ss')) ," +
                "WATERMARK FOR  rowtime  AS  rowtime - INTERVAL '2' SECOND " +
                ") WITH("+ MyKafkaUtils.getKafkaDDL(topic,groupId) +")");

        // 3. Keep only search-result pages that actually carry a search phrase,
        //    e.g. item = "小米黑盒 132432432432".
        //    NOTE: a space is required before AND — without it the concatenated SQL
        //    reads ...='good_list'AND... and fails to parse.
        Table fullwordTable = tableEnv.sqlQuery(" SELECT page['item'] fullword ,rowtime " +
                "FROM page_view " +
                "WHERE page['page_id']='good_list' " +
                "AND page['item'] IS NOT NULL ");

        // 4. Explode each phrase into one row per keyword via the UDTF,
        //    e.g. "小米黑盒 132432432432" -> ("小米"), ("黑盒"), ("132432432432")
        Table keywordTable = tableEnv.sqlQuery("select keyword,rowtime from " +
                fullwordTable +
                ",LATERAL TABLE(ik_analyze(fullword)) as t(keyword)");
        // 5. Count keywords per 10-second tumbling window; stt/edt are the window
        //    bounds and ts is the processing timestamp of the emission.
        Table table = tableEnv.sqlQuery(" select keyword,count(*) ct,'" +
                GmallConstant.KEYWORD_SEARCH + "' as source," +
                " DATE_FORMAT( TUMBLE_START(rowtime,interval '10' second),'yyyy-MM-dd HH:mm:ss') stt, " +
                " DATE_FORMAT( TUMBLE_END(rowtime,interval '10' second),'yyyy-MM-dd HH:mm:ss') edt, " +
                " UNIX_TIMESTAMP() * 1000 ts " +
                " from " + keywordTable +
                " group by  TUMBLE(rowtime,interval '10' second),keyword ");

        // Convert the append-only window aggregate back to a DataStream of POJOs
        DataStream<KeywordStats> keywordStatsDataStream = tableEnv.toAppendStream(table, KeywordStats.class);
        keywordStatsDataStream.print();
        // Sink the aggregated keyword counts to ClickHouse
        keywordStatsDataStream.addSink(ClickhouseUtil.getJdbcSink(
                "insert into keyword_stats_2022(keyword,ct,source,stt,edt,ts) values(?,?,?,?,?,?)"

        ));
        env.execute();

    }
}
