package com.atguigu.gmall.realtime.app.dws;

import com.atguigu.gmall.realtime.app.BaseSqlApp;
import com.atguigu.gmall.realtime.bean.KeywordStats;
import com.atguigu.gmall.realtime.common.Constant;
import com.atguigu.gmall.realtime.function.IkAnalyzer;
import com.atguigu.gmall.realtime.function.KwProduct;
import com.atguigu.gmall.realtime.util.FlinkSinkUtil;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author Archie
 * @date 2021-10-27 23:59
 * @description Product keyword statistics: consumes DWS product stats from Kafka,
 * tokenizes sku names into keywords, aggregates counts per (window, keyword, source),
 * and writes the result to ClickHouse.
 */
public class DwsProductKeyWordStatsApp extends BaseSqlApp {

	public static void main(String[] args) {
		// args: web UI port 4005, parallelism 1, job / checkpoint group name
		new DwsProductKeyWordStatsApp().init(4005, 1, "DwsProductKeyWordStatsApp");
	}

	/**
	 * Builds the keyword-statistics pipeline:
	 * Kafka (DWS product stats) -> filter -> tokenize -> explode counts -> aggregate -> ClickHouse.
	 *
	 * @param tenv the table environment provided by {@link BaseSqlApp}
	 */
	@Override
	public void run(StreamTableEnvironment tenv) {

		// 0. Create a dynamic table bound to the Kafka DWS product-stats topic
		tenv.executeSql("create table product_stats(" +
				"   stt string, " +
				"   edt string, " +
				"   sku_name string, " +
				"   click_ct bigint, " +
				"   order_ct bigint, " +
				"   cart_ct bigint " +
				")with(" +
				"   'connector' = 'kafka', " +
				"   'properties.bootstrap.servers' = 'hadoop162:9092,hadoop163:9092,hadoop164:9092', " +
				"   'properties.group.id' = 'DwsProductKeyWordStatsApp', " +
				"   'topic' = '" + Constant.TOPIC_DWS_PRODUCT_STATS + "', " +
				// with no committed offsets, start from the latest offset;
				// otherwise resume from the previously committed position
				"   'scan.startup.mode' = 'latest-offset', " +
				"   'format' = 'json' " +
				")");

		// 1. Keep only rows where at least one of the three counts is non-zero.
		//    NOTE: the original concatenation produced "select *from ..." (missing
		//    space between '*' and 'from'); fixed here.
		Table t1 = tenv.sqlQuery("select" +
				" * " +
				"from product_stats " +
				"where click_ct > 0 " +
				"or order_ct > 0 " +
				"or cart_ct > 0 ");
		tenv.createTemporaryView("t1", t1);

		// 2. Tokenize sku_name into keywords with the IK analyzer (table function
		//    producing a `word` column; one output row per token)
		tenv.createTemporaryFunction("ik_analyzer", IkAnalyzer.class);

		Table t2 = tenv.sqlQuery("select" +
				" stt, " +
				" edt, " +
				" word, " +
				" click_ct, " +
				" order_ct, " +
				" cart_ct " +
				"from t1 " +
				"join lateral table(ik_analyzer(sku_name)) on true");
		tenv.createTemporaryView("t2", t2);

		// 3. Explode the three count columns into (source, ct) rows
		//    (columns-to-rows via a table function)
		tenv.createTemporaryFunction("kw_product", KwProduct.class);

		Table t3 = tenv.sqlQuery("select" +
				" stt, " +
				" edt, " +
				" word, " +
				" source, " +
				" ct " +
				"from t2, " +
				" lateral table(kw_product(click_ct, order_ct, cart_ct))");
		tenv.createTemporaryView("t3", t3);

		// 4. Aggregate counts per (window start, window end, keyword, source);
		//    ts is the emit time in epoch milliseconds
		Table table = tenv.sqlQuery("select" +
				" stt, " +
				" edt," +
				" word keyword, " +
				" source, " +
				" sum(ct) ct, " +
				" unix_timestamp() *1000 ts " +
				"from t3 " +
				"group by stt, edt, word, source");

		// 5. Sink to ClickHouse. toRetractStream emits (flag, row) pairs;
		//    keep only the add records (flag == true) and drop retractions.
		tenv
				.toRetractStream(table, KeywordStats.class)
				.filter(t -> t.f0)
				.map(t -> t.f1)
				.addSink(FlinkSinkUtil.getClickHouseSink(
						"gmall2021", "keyword_stats_2021", KeywordStats.class
				));

	}
}
