package net.bwie.flink;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.jdbc.JdbcConnectionOptions;
import org.apache.flink.connector.jdbc.JdbcExecutionOptions;
import org.apache.flink.connector.jdbc.JdbcSink;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;

/**
 * Flink job that consumes DWD order records from Kafka in real time, keeps a
 * running total of sku quantity purchased per user (map -> keyBy -> sum, no
 * windowing), and upserts the aggregate into MySQL.
 * @author xuanyu
 * @date 2025/10/28
 */
public class _06GmallAggregateJob {

	public static void main(String[] args) throws Exception {
		// 1. Execution environment.
		// Parallelism 1 keeps output ordering deterministic for this demo job.
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(1);

		// 2. Source: consume DWD order records (JSON strings) from Kafka.
		KafkaSource<String> source = KafkaSource.<String>builder()
			.setBootstrapServers("node101:9092,node102:9092,node103:9092")
			.setTopics("dwd-order")
			.setGroupId("dwd-order-g1")
			.setStartingOffsets(OffsetsInitializer.earliest())
			.setValueOnlyDeserializer(new SimpleStringSchema())
			.build();
		DataStreamSource<String> stream = env.fromSource(
			source, WatermarkStrategy.noWatermarks(), "Kafka Source"
		);

		// 3. Transformation: running total of sku quantity per user.
		//    map -> keyBy -> sum
		// 3-1. Extract (user_id, sku_num) from each JSON record.
		SingleOutputStreamOperator<Tuple2<String, Long>> tupleStream = stream.map(
			new MapFunction<String, Tuple2<String, Long>>() {
				@Override
				public Tuple2<String, Long> map(String value) throws Exception {
					// Parse the record once; the original parsed it once per field.
					JSONObject record = JSON.parseObject(value);
					String userId = record.getJSONObject("info").getString("user_id");
					// fastjson getLong() returns a nullable Long; a missing sku_num
					// would otherwise NPE later inside sum(). Default it to 0.
					Long skuNum = record.getJSONObject("detail").getLong("sku_num");
					return Tuple2.of(userId, skuNum == null ? 0L : skuNum);
				}
			}
		);

		// 3-2. Group by user id (tuple field f0).
		KeyedStream<Tuple2<String, Long>, String> keyedStream = tupleStream.keyBy(tuple -> tuple.f0);

		// 3-3. Running sum of sku quantity (tuple field f1, positional index 1).
		SingleOutputStreamOperator<Tuple2<String, Long>> resultStream = keyedStream.sum(1);

		// 4. Sink: upsert each (user_id, total sku_num) pair into MySQL.
		/*
-- Create the database
CREATE DATABASE IF NOT EXISTS flink_day12;
-- Create the table
CREATE TABLE IF NOT EXISTS flink_day12.gmall_order_user_report(
    user_id varchar(255) PRIMARY KEY ,
    sku_num bigint
) ;

-- Upsert statement
REPLACE INTO flink_day12.gmall_order_user_report(user_id, sku_num) VALUES (?, ?);
		 */
		SinkFunction<Tuple2<String, Long>> jdbcSink = JdbcSink.sink(
			"REPLACE INTO flink_day12.gmall_order_user_report(user_id, sku_num) VALUES (?, ?)",
			(statement, tuple) -> {
				// Typed setters instead of setObject: unambiguous JDBC type mapping.
				statement.setString(1, tuple.f0);
				statement.setLong(2, tuple.f1);
			},
			JdbcExecutionOptions.builder()
				.withBatchSize(1000)
				.withBatchIntervalMs(200)
				.withMaxRetries(5)
				.build(),
			new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
				.withUrl("jdbc:mysql://node101:3306/flink_day12")
				// NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x
				// class name; with Connector/J 8+ prefer "com.mysql.cj.jdbc.Driver"
				// (the old name still works there but logs a deprecation warning).
				// Kept as-is — confirm which connector version is on the classpath.
				.withDriverName("com.mysql.jdbc.Driver")
				// NOTE(review): hard-coded credentials — move to config/secrets
				// management before this leaves a classroom/demo environment.
				.withUsername("root")
				.withPassword("123456")
				.build()
		);
		resultStream.addSink(jdbcSink);

		// 5. Launch the job (blocks until the streaming job terminates).
		env.execute("GmallAggregateJob");
	}

}
