package com.atguigu.gmall.realtime.app.dws;

import com.atguigu.gmall.realtime.app.BaseSqlApp;
import com.atguigu.gmall.realtime.bean.ProvinceStats;
import com.atguigu.gmall.realtime.common.Constant;
import com.atguigu.gmall.realtime.util.FlinkSinkUtil;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWS-layer job: aggregates province-level order statistics from the DWM
 * order-wide topic and writes them to ClickHouse.
 *
 * @author Archie
 * @date 2021-10-27 18:31
 * @description Tumbling-window (5s, event time) aggregation of order amount
 *              and distinct order count per province.
 */
public class DwsProvinceStatsApp extends BaseSqlApp {

	public static void main(String[] args) {
		new DwsProvinceStatsApp().init(
				4003,
				1,
				"DwsProvinceStatsApp"
		);
	}

	/**
	 * Reads the DWM order-wide topic from Kafka, aggregates order amount and
	 * distinct order count per province over 5-second event-time tumbling
	 * windows, and sinks the results to ClickHouse.
	 *
	 * @param tenv streaming table environment provided by {@link BaseSqlApp}
	 */
	@Override
	public void run(StreamTableEnvironment tenv) {
		// 1. Declare a dynamic table backed by the Kafka order-wide topic.
		//    Event time is derived from create_time with a 5-second watermark delay.
		tenv.executeSql("create table order_wide(" +
				"	province_id bigint, " +
				"	province_name string, " +
				"	province_area_code string, " +
				"	province_iso_code string, " +
				"	province_3166_2_code string, " +
				"	order_id bigint, " +
				"	split_total_amount decimal(20, 2), " +
				"	create_time string, " +
				"	et as to_timestamp(create_time), " +
				"	watermark for et as et - interval '5' second " +
				") with(" +
				"	'connector' = 'kafka', " +
				// FIX: removed the stray space after the comma in the broker list.
				"	'properties.bootstrap.servers' = 'hadoop101:9092,hadoop102:9092,hadoop103:9092', " +
				"	'properties.group.id' = 'DwsProvinceStatsApp', " +
				// FIX: topic value used to be padded as ' topicName ', which makes the
				// connector subscribe to a topic name containing literal spaces.
				"	'topic' = '" + Constant.TOPIC_DWM_ORDER_WIDE + "', " +
				// Only used when the consumer group has no committed offsets;
				// otherwise consumption resumes from the last committed position.
				"	'scan.startup.mode' = 'latest-offset', " +
				"	'format' = 'json' " +
				")");
		// NOTE: removed a leftover debug call
		//   tenv.sqlQuery("select * from order_wide").execute().print();
		// On an unbounded streaming source, execute().print() blocks the calling
		// thread indefinitely, so the aggregation and sink below never ran.

		// 2. Windowed aggregation: 5-second tumbling event-time windows, grouped
		//    by province. Produces amount sum and distinct order count per window.
		Table resultTable = tenv.sqlQuery("select" +
				"	date_format(tumble_start(et, interval '5' second), 'yyyy-MM-dd HH:mm:ss') stt, " +
				"	date_format(tumble_end(et, interval '5' second), 'yyyy-MM-dd HH:mm:ss') edt, " +
				" 	province_id, " +
				" 	province_name, " +
				"	 province_area_code area_code," +
				" 	province_iso_code iso_code, " +
				" 	province_3166_2_code iso_3166_2, " +
				" 	sum(split_total_amount) order_amount, " +
				" 	count(distinct(order_id)) order_count, " +
				" 	unix_timestamp()*1000 ts " +
				"from order_wide " +
				"group by " +
				"	province_id, " +
				"	province_name, " +
				"	province_area_code, " +
				"	province_iso_code, " +
				"	province_3166_2_code, " +
				"	tumble(et, interval '5' second)");

		// 3. Convert the result table to a retract stream, keep only the
		//    accumulate (insert, f0 == true) messages, and write to ClickHouse.
		tenv
				.toRetractStream(resultTable, ProvinceStats.class)
				.filter(t -> t.f0)
				.map(t -> t.f1)
				.addSink(FlinkSinkUtil.getClickHouseSink(
						"gmall2021", "province_stats_2021", ProvinceStats.class
				));

	}
}
