package com.atguigu.gmall.realtime.app.dws;

import com.atguigu.gmall.realtime.app.func.KeywordProductC2RUDTF;
import com.atguigu.gmall.realtime.app.func.KeywordUDTF;
import com.atguigu.gmall.realtime.bean.KeywordStats;
import com.atguigu.gmall.realtime.util.ClickHouseUtil;
import com.atguigu.gmall.realtime.util.MyKafkaUtil;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author: xu
 * @desc: 从商品统计中获取关键词
 */
public class KeywordStats4ProductApp {
    public static void main(String[] args) throws Exception {
        // TODO 1. Prepare the execution environment
        // 1.1 Local streaming environment for testing
        StreamExecutionEnvironment execEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        // 1.2 Run with a single parallel subtask so local console output stays ordered
        execEnv.setParallelism(1);
        // 1.3 Checkpointing — intentionally left disabled for local development;
        //     re-enable the lines below for a fault-tolerant cluster deployment.
        // execEnv.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        // execEnv.getCheckpointConfig().setCheckpointTimeout(60000);
        // execEnv.setRestartStrategy(RestartStrategies.fixedDelayRestart(3,3000L));
        // execEnv.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // execEnv.setStateBackend(new FsStateBackend("hdfs://node1:8020/gmall/checkpoint/KeywordStats4ProductApp"))
        // System.setProperty("HADOOP_USER_NAME", "root");

        // TODO 2. Table environment layered on top of the stream environment
        StreamTableEnvironment tblEnv = StreamTableEnvironment.create(execEnv);

        // TODO 3. Register the user-defined table functions used in the query below
        // ik_analyze: tokenizes a spu_name string into individual keywords
        tblEnv.createTemporarySystemFunction("ik_analyze", KeywordUDTF.class);
        // keywordProductC2R: fans the click/cart/order counters out into (ct, source) rows
        tblEnv.createTemporarySystemFunction("keywordProductC2R", KeywordProductC2RUDTF.class);

        // TODO 4. Declare the Kafka source topic as a dynamic table
        String consumerGroup = "keyword_stats_app";
        String sourceTopic = "dws_product_stats";

        String sourceDDL = "CREATE TABLE product_stats (" +
                " spu_name STRING," +
                " click_ct BIGINT," +
                " cart_ct BIGINT," +
                " order_ct BIGINT," +
                " stt STRING,edt STRING" +
                " ) WITH (" + MyKafkaUtil.getKafkaDDL(sourceTopic, consumerGroup) + ")";
        tblEnv.executeSql(sourceDDL);

        // TODO 5. Cross-apply both UDTFs: one output row per (keyword, counter-source) pair
        String statsSQL = "SELECT" +
                " keyword," +
                " ct," +
                " source," +
                " DATE_FORMAT(stt,'yyyy-MM-dd HH:mm:ss') as stt," +
                " DATE_FORMAT(edt,'yyyy-MM-dd HH:mm:ss') as edt, " +
                " UNIX_TIMESTAMP() * 1000 as ts " +
                " FROM product_stats ," +
                " LATERAL TABLE(ik_analyze(spu_name)) as T(keyword)," +
                " LATERAL TABLE(keywordProductC2R(click_ct, cart_ct,order_ct)) as T2(ct,source)";
        Table resultTable = tblEnv.sqlQuery(statsSQL);

        // TODO 6. Convert the dynamic table back into an append-only POJO stream
        DataStream<KeywordStats> statsStream = tblEnv.toAppendStream(resultTable, KeywordStats.class);
        statsStream.print();

        // TODO 7. Sink the keyword statistics into ClickHouse
        statsStream.addSink(
                ClickHouseUtil.<KeywordStats>getJdbcSink("insert into keyword_stats(keyword,ct,source,stt,edt,ts) values(?,?,?,?,?,?)")
        );

        execEnv.execute(KeywordStats4ProductApp.class.getSimpleName());
    }
}