package net.bwie.flink;

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableConfig;
import org.apache.flink.table.api.TableEnvironment;

/**
 * Flink SQL demo: consume order events from Kafka in real time, aggregate them
 * over tumbling event-time windows, and write the per-window report into MySQL.
 *
 * Pipeline: Kafka source table -&gt; tumbling-window aggregation -&gt; JDBC (MySQL) sink.
 *
 * @author xuanyu
 * @date 2025/10/29
 */
public class _02SqlWindowDemo {

	public static void main(String[] args) throws Exception {
		// 1. Table execution environment (pure Table API, streaming mode).
		/*
		https://nightlies.apache.org/flink/flink-docs-release-1.17/docs/dev/table/common/#create-a-tableenvironment
		https://nightlies.apache.org/flink/flink-docs-release-1.17/docs/dev/table/config/#execution-options
		 */
		EnvironmentSettings settings = EnvironmentSettings
			.newInstance()
			.inStreamingMode()
			.build();
		TableEnvironment tabEnv = TableEnvironment.create(settings);
		// Default parallelism 1 keeps local output deterministic and easy to follow.
		TableConfig config = tabEnv.getConfig();
		config.set("table.exec.resource.default-parallelism", "1");

		// 2. Input table: Kafka source with an event-time watermark on order_time.
		/*
		https://nightlies.apache.org/flink/flink-docs-release-1.17/docs/connectors/table/kafka/#how-to-create-a-kafka-table
		https://nightlies.apache.org/flink/flink-docs-release-1.17/docs/dev/table/sql/queries/window-agg/#selecting-group-window-start-and-end-timestamps-1
		 */
		// WATERMARK with INTERVAL '0' MINUTE means no out-of-orderness is tolerated:
		// any record later than the current watermark is dropped from its window.
		tabEnv.executeSql(
			"CREATE TABLE order_kafka_source (\n" +
				"    `user_id` STRING,\n" +
				"    `order_id` STRING,\n" +
				"    `product_name` STRING,\n" +
				"    `product_price` DECIMAL(16, 2),\n" +
				"    `order_time` TIMESTAMP(3),\n" +
				"    WATERMARK FOR `order_time` AS `order_time` - INTERVAL '0' MINUTE\n" +
				") WITH (\n" +
				"  'connector' = 'kafka',\n" +
				"  'topic' = 'order-topic',\n" +
				"  'properties.bootstrap.servers' = 'node101:9092,node102:9092,node103:9092',\n" +
				"  'properties.group.id' = 'sql-order-g1',\n" +
				"  'scan.startup.mode' = 'earliest-offset',\n" +
				"  'format' = 'csv'\n" +
				")"
		);

		// 3. Query: order count and total amount per product per 2-minute tumbling window
		// (windowing TVF syntax; grouped by the window bounds plus the product name).
		Table table = tabEnv.sqlQuery(
			"SELECT\n" +
				"    window_start,\n" +
				"    window_end,\n" +
				"    product_name,\n" +
				"    count(order_id) AS order_count,\n" +
				"    sum(product_price) AS order_amount\n" +
				"FROM TABLE (\n" +
				"             TUMBLE(TABLE order_kafka_source, DESCRIPTOR(order_time), INTERVAL '2' MINUTES)\n" +
				"     )\n" +
				"GROUP BY window_start, window_end, product_name"
		);

		// 4. Output table: JDBC upsert sink. The PRIMARY KEY (NOT ENFORCED) makes the
		// connector upsert on (window_start, window_end, product_name), so repeated
		// window emissions overwrite rather than duplicate.
		// NOTE(review): 'com.mysql.jdbc.Driver' is the legacy Connector/J 5.x class;
		// with Connector/J 8+ it should be 'com.mysql.cj.jdbc.Driver' — confirm which
		// connector jar is on the classpath. The 'driver' option may also be omitted
		// and inferred from the URL.
		tabEnv.executeSql(
			"CREATE TABLE sql_order_window_report_mysql_sink (\n" +
				"    window_start STRING,\n" +
				"    window_end STRING,\n" +
				"    product_name STRING,\n" +
				"    order_count BIGINT,\n" +
				"    order_amount DECIMAL(16,2),\n" +
				"    PRIMARY KEY (window_start, window_end, product_name) NOT ENFORCED\n" +
				") WITH (\n" +
				"   'connector' = 'jdbc',\n" +
				"   'url' = 'jdbc:mysql://node101:3306/flink_day13',\n" +
				"   'table-name' = 'sql_order_window_report',\n" +
				"   'driver' = 'com.mysql.jdbc.Driver',\n" +
				"   'username' = 'root',\n" +
				"   'password' = '123456'\n" +
				")"
		);

		// 5. Insert the windowed report into the sink. The TIMESTAMP window bounds are
		// cast to STRING to match the sink schema. executeSql() only SUBMITS the job
		// and returns immediately; await() blocks until the streaming job terminates,
		// which keeps a local/IDE run alive instead of exiting right after submission.
		tabEnv.createTemporaryView("report_table", table);
		tabEnv.executeSql(
			"INSERT INTO sql_order_window_report_mysql_sink\n" +
				"SELECT \n" +
				"    CAST(window_start AS STRING), \n" +
				"    CAST(window_end AS STRING), \n" +
				"    product_name, \n" +
				"    order_count, \n" +
				"    order_amount \n" +
				"FROM report_table"
		).await();
	}

}
