package com.atguigu.gmall.app.dws;

import com.atguigu.gmall.app.func.UDTFKeywordsFunc;
import com.atguigu.gmall.bean.KeywordBean;
import com.atguigu.gmall.common.GmallConfig;
import com.atguigu.gmall.util.ClickHouseUtil;
import com.atguigu.gmall.util.KafkaUtil;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.jdbc.JdbcStatementBuilder;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.sql.PreparedStatement;
import java.sql.SQLException;

/**
 * @author yhm
 * @create 2022-09-16 9:12
 */
/**
 * DWS-layer Flink job: traffic source keyword page-view window.
 *
 * <p>Reads page-log events from the Kafka topic {@code dwd_traffic_page_log},
 * keeps only rows arriving from the search page with {@code item_type = 'keyword'},
 * splits the raw search phrase into individual keywords with the
 * {@link UDTFKeywordsFunc} table function, counts each keyword inside a
 * 10-second tumbling event-time window, and writes the aggregates to the
 * ClickHouse table {@code dws_traffic_source_keyword_page_view_window}.
 */
public class DwsTrafficSourceKeywordPageViewWindow {
    public static void main(String[] args) throws Exception {
        // TODO 1 Environment setup: streaming environment plus Table API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 for development/demo; raise in production together with
        // the Kafka partition count.
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2 State backend / checkpointing — intentionally disabled for local
        // runs; re-enable before deploying so the Kafka offsets and window state
        // survive failures.
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
         */

        // TODO 3 Declare the Kafka source table over the page-log topic.
        // Sample payload:
        //{"common":{"ar":"420000","uid":"678","os":"iOS 13.2.3","ch":"Appstore","is_new":"0","md":"iPhone Xs Max","mid":"mid_281648","vc":"v2.1.134","ba":"iPhone"},"page":{"page_id":"good_list","item":"小米智能盒子","during_time":11304,"item_type":"keyword","last_page_id":"search"},"ts":1645406174000}
        String pageTopic = "dwd_traffic_page_log";
        String groupId = "dws_traffic_source_keyword_page_view_window";
        // `rt` is a computed event-time column derived from the epoch-millis `ts`,
        // with a 2-second bounded-out-of-orderness watermark.
        tableEnv.executeSql("create table page_log( \n" +
                "  `common` map<STRING,STRING> , \n" +
                "  `page` map<STRING,STRING> , \n" +
                "  `ts` bigint , \n" +
                "  `rt` as TO_TIMESTAMP_LTZ(ts, 3) , \n" +
                "   WATERMARK FOR rt AS rt - INTERVAL '2' SECOND " +
                ")" + KafkaUtil.getKafkaDDL(pageTopic, groupId));


        // TODO 4 Keep only keyword searches: the user must have come from the
        // search page and the page item must be tagged as a keyword.
        Table keywordsTable = tableEnv.sqlQuery("select \n" +
                "  `page`['item'] keywords,\n" +
                "  rt \n" +
                "from page_log\n" +
                "where `page`['last_page_id']='search'\n" +
                "and `page`['item_type']='keyword'");
        tableEnv.createTemporaryView("keywords_table", keywordsTable);

        // TODO 5 Split the full search phrase into individual keywords.
        // Register the word-splitting UDTF, then cross-join each row with the
        // words it produces (LATERAL TABLE).
        tableEnv.createTemporaryFunction("split_keywords", UDTFKeywordsFunc.class);
        Table keywordTable = tableEnv.sqlQuery("SELECT \n" +
                "  keyword, \n" +
                "  rt \n" +
                "FROM keywords_table, LATERAL TABLE(split_keywords(keywords))");
        tableEnv.createTemporaryView("keyword_table", keywordTable);

        // TODO 6 Count keywords per 10-second tumbling event-time window.
        // NOTE(review): the select emits 7 columns (incl. the literal 'search'
        // source and the placeholder '123' err) while the ClickHouse insert
        // below binds only 6 parameters — presumably KeywordBean marks one
        // field (err?) as transient so it is skipped by the sink; confirm
        // against KeywordBean/ClickHouseUtil.
        // `ts` is the wall-clock emit time (UNIX_TIMESTAMP() is seconds, scaled
        // to milliseconds), used downstream for versioning/dedup.
        Table countsTable = tableEnv.sqlQuery("select \n" +
                "  DATE_FORMAT(TUMBLE_START(rt, INTERVAL '10' second) , 'yyyy-MM-dd HH:mm:ss') AS stt,\n" +
                "  DATE_FORMAT(TUMBLE_END(rt, INTERVAL '10' second) , 'yyyy-MM-dd HH:mm:ss') AS edt,\n" +
                "  'search' source,\n" +
                "  '123' err,\n" +
                "  keyword, \n" +
                "  count(*) keyword_count, \n" +
                "  UNIX_TIMESTAMP()*1000 ts \n" +
                "from keyword_table\n" +
                "group by \n" +
                "TUMBLE(rt, INTERVAL '10' second),\n" +
                "keyword");
        // Window aggregation over event time yields an append-only stream, so
        // toAppendStream is safe here; fields are mapped to KeywordBean by name.
        DataStream<KeywordBean> keywordBeanDataStream = tableEnv.toAppendStream(countsTable, KeywordBean.class);

        // Debug output; remove or lower to a logger in production.
        keywordBeanDataStream.print("bean >>>>");

        // TODO 7 Sink the window aggregates to ClickHouse via the shared JDBC
        // sink helper (parameterized insert, one '?' per persisted bean field).
        keywordBeanDataStream.addSink(ClickHouseUtil.getClickHouseSink("insert into dws_traffic_source_keyword_page_view_window values (?,?,?,?,?,?)"));

        // TODO 8 Launch the job; the job name doubles as the consumer group id.
        env.execute(groupId);

    }
}
