package net.bwie.flink;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

import org.checkerframework.checker.units.qual.K; // NOTE(review): unused — consider removing

/**
 * Flink daily-assessment program 2: reads CDC comment records from the
 * {@code dwd_comment_info} Kafka topic and computes, per product (sku_id),
 * the running comment count and the running positive-review rate.
 *
 * @author xuanyu
 * @date 2025/10/19
 */
public class FlinkRiKaoDemo02 {

	public static void main(String[] args) throws Exception {
		// 1.执行环境-env
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(1);

		// todo 8、新建Flink流处理类读取dwd_comment_info主题数据（10分）
		/*
		https://nightlies.apache.org/flink/flink-docs-release-1.17/docs/connectors/datastream/kafka/#kafka-source
		 */
		KafkaSource<String> source = KafkaSource.<String>builder()
			.setBootstrapServers("node101:9092,node102:9092,node103:9092")
			.setTopics("dwd_comment_info")
			.setGroupId("my-group")
			.setStartingOffsets(OffsetsInitializer.earliest())
			.setValueOnlyDeserializer(new SimpleStringSchema())
			.build();
		DataStreamSource<String> stream = env.fromSource(
			source, WatermarkStrategy.noWatermarks(), "Kafka Source"
		);

		// todo 9、根据商品Id统计评论数据量（10分）
		/*
			需求：实时统计每个商品的评论数据量
		 */
		// 9-1. 解析json，获取商品sku_id，封装二元组
		SingleOutputStreamOperator<Tuple2<String, Integer>> stream91 = stream.map(
			new MapFunction<String, Tuple2<String, Integer>>() {
				@Override
				public Tuple2<String, Integer> map(String value) throws Exception {
					// 解析json
					JSONObject jsonObject = JSON.parseObject(value);
					// 获取after值
					JSONObject after = jsonObject.getJSONObject("after");
					// 获取sku_id值
					String skuId = after.getString("sku_id");
					// 返回
					return Tuple2.of(skuId, 1);
				}
			}
		);
		// 9-2. 分组聚合
		SingleOutputStreamOperator<Tuple2<String, Integer>> stream92 = stream91
			.keyBy(
				new KeySelector<Tuple2<String, Integer>, String>() {
					@Override
					public String getKey(Tuple2<String, Integer> value) throws Exception {
						return value.f0;
					}
				}
			)
			.sum(1);
//		stream92.print("stream9");

		// todo 10、根据商品Id统计好评率（10分）
		/*
			需求：实时统计每个商品的好评率
		 */
		// 9-1. 解析json，获取商品sku_id，封装二元组
		SingleOutputStreamOperator<Tuple2<String, Integer>> stream101 = stream.process(
			new ProcessFunction<String, Tuple2<String, Integer>>() {
				@Override
				public void processElement(String value,
				                           Context ctx,
				                           Collector<Tuple2<String, Integer>> out) throws Exception {
					// 解析json
					JSONObject jsonObject = JSON.parseObject(value);
					// 获取after值
					JSONObject after = jsonObject.getJSONObject("after");
					// 获取sku_id值
					String skuId = after.getString("sku_id");
					// 获取appraise值，为评论级别
					String appraise = after.getString("appraise");
					// 当 评论为好评时：1201，输出结果
					if("1201".equals(appraise)){
						out.collect(Tuple2.of(skuId, 1));
					}
				}
			}
		);
		// 9-2. 分组聚合
		SingleOutputStreamOperator<Tuple2<String, Integer>> stream102 = stream101
			.keyBy(
				new KeySelector<Tuple2<String, Integer>, String>() {
					@Override
					public String getKey(Tuple2<String, Integer> value) throws Exception {
						return value.f0;
					}
				}
			)
			.sum(1);
		stream102.print("stream10");



		// 5.触发执行
		env.execute("FlinkRiKaoDemo02");
	}
}
