package com.atguigu.realtime.app.dws;

import com.atguigu.realtime.app.BaseSqlApp;
import com.atguigu.realtime.bean.KeywordStats;
import com.atguigu.realtime.function.KeyWordUdtf;
import com.atguigu.realtime.function.KwProduct;
import com.atguigu.realtime.util.FlinkSinkUtil;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import static com.atguigu.realtime.common.Constant.TOPIC_DWS_PRODUCT_STATS;

/**
 * Created by GodRUI on 2021/7/5 18:56
 */
/**
 * DWS keyword-statistics job: consumes pre-aggregated product stats from Kafka,
 * tokenizes SKU names into keywords, re-aggregates per keyword inside each window,
 * explodes the three count columns into (source, ct) rows, and writes the result
 * to ClickHouse.
 */
public class DwsProductKeywordStatsApp extends BaseSqlApp {

    public static void main(String[] args) {
        // port 4005, parallelism 1, checkpoint/consumer-group name
        new DwsProductKeywordStatsApp().init(4005, 1, "DwsProductKeywordStatsApp");
    }

    @Override
    public void run(StreamTableEnvironment tEnv) {
        createProductStatsSource(tEnv);   // Kafka topic -> table product_stats
        filterNonZeroStats(tEnv);         // view t1: keep rows with at least one non-zero count
        splitSkuNameIntoWords(tEnv);      // view t2: one row per (stats row, keyword)
        aggregateByKeyword(tEnv);         // view t3: sum counts per (stt, edt, word)
        Table result = explodeCountColumns(tEnv); // (click/order/cart) columns -> (source, ct) rows
        writeToClickHouse(result, tEnv);
    }

    /**
     * Declares the Kafka-backed source table over the DWS product-stats topic.
     * Schema lists only the fields this job reads; JSON format ignores the rest.
     */
    private void createProductStatsSource(StreamTableEnvironment tEnv) {
        tEnv.executeSql(
            " create table product_stats(" +
                " stt string, " +
                " edt string, " +
                " sku_name string, " +
                " click_ct bigint, " +
                " order_ct bigint, " +
                " cart_ct bigint " +
                ")with(" +
                " 'connector' = 'kafka'," +
                " 'properties.bootstrap.servers' = 'hadoop162:9092,hadoop163:9092,hadoop164:9092'," +
                " 'properties.group.id' = 'DwsProductKeywordStatsApp'," +
                " 'topic' = '" + TOPIC_DWS_PRODUCT_STATS + "'," +
                " 'scan.startup.mode' = 'latest-offset', " +
                " 'format' = 'json') "
        );
    }

    /** Registers view t1: rows where at least one of the three counts is non-zero. */
    private void filterNonZeroStats(StreamTableEnvironment tEnv) {
        Table t1 = tEnv.sqlQuery(
            "select * from product_stats where click_ct>0 or order_ct>0 or cart_ct>0"
        );
        tEnv.createTemporaryView("t1", t1);
    }

    /**
     * Registers view t2: tokenizes sku_name with the IK-analyzer UDTF, producing
     * one row per emitted keyword (column `word`) joined back to the counts.
     */
    private void splitSkuNameIntoWords(StreamTableEnvironment tEnv) {
        tEnv.createTemporaryFunction("ik_analyzer", KeyWordUdtf.class);
        Table t2 = tEnv.sqlQuery(
            "select stt,edt,word,click_ct,order_ct,cart_ct" +
                " from t1" +
                " join lateral table(ik_analyzer(sku_name)) on true"
        );
        tEnv.createTemporaryView("t2", t2);
    }

    /**
     * Registers view t3: different SKUs can share a keyword within the same
     * window, so sum the counts grouped by (stt, edt, word).
     */
    private void aggregateByKeyword(StreamTableEnvironment tEnv) {
        Table t3 = tEnv.sqlQuery(
            " select stt,edt,word,sum(click_ct) click_ct,sum(order_ct) order_ct,sum(cart_ct) cart_ct" +
                " from t2" +
                " group by stt,edt,word "
        );
        tEnv.createTemporaryView("t3", t3);
    }

    /**
     * Explodes the three count columns into (source, ct) rows via the kw_product
     * UDTF and stamps each row with the current epoch millis.
     *
     * @return the final keyword-stats table
     */
    private Table explodeCountColumns(StreamTableEnvironment tEnv) {
        tEnv.createTemporaryFunction("kw_product", KwProduct.class);
        return tEnv.sqlQuery(
            " select stt,edt,source,word keyword,ct,unix_timestamp() * 1000 ts" +
                " from t3" +
                " join lateral table(kw_product(click_ct,order_ct,cart_ct)) on true"
        );
    }

    /**
     * Converts the (aggregated, hence updating) table to a retract stream, drops
     * retraction records (f0 == false), and sinks the payloads to ClickHouse.
     * NOTE(review): keeping only the add-records means updated aggregates are
     * appended, not replaced — presumably deduplicated by the ClickHouse table
     * engine; verify keyword_stats_2021 uses a replacing/aggregating engine.
     */
    private void writeToClickHouse(Table result, StreamTableEnvironment tEnv) {
        tEnv.toRetractStream(result, KeywordStats.class)
            .filter(x -> x.f0)   // keep inserts/update-afters only
            .map(x -> x.f1)      // unwrap (flag, bean) -> bean
            .addSink(FlinkSinkUtil.getClickhouseSink("gmall2021", "keyword_stats_2021", KeywordStats.class));
    }
}
