package net.bwie.flink;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.time.Duration;

/**
 * 	1. 实时统计浏览量PageView
 * 		 {
 * 		  "common": {
 * 		    "ar": "440000",
 * 		    "ba": "iPhone",
 * 		    "ch": "Appstore",
 * 		    "is_new": "1",
 * 		    "md": "iPhone Xs",
 * 		    "mid": "mid_409637",
 * 		    "os": "iOS 13.3.1",
 * 		    "uid": "364",
 * 		    "vc": "v2.1.134"
 *          },
 * 		  "page": {
 * 		    "during_time": 14631,
 * 		    "last_page_id": "home",
 * 		    "page_id": "mine"
 *          },
 * 		  "ts": 1713404821000
 *        }
 * 	条件：
 * 		page 页面日志
 * 		page_id 页面ID不能为空
 * @author xuanyu
 * @date 2025/10/21
 */
public class _01RealtimePageViewDemo {

	public static void main(String[] args) throws Exception {
		// 1. Execution environment
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(1);

		// 2. Source: consume raw log lines (JSON strings) from Kafka
		KafkaSource<String> source = KafkaSource.<String>builder()
			.setBootstrapServers("node101:9092,node102:9092,node103:9092")
			.setTopics("topic-log")
			.setGroupId("my-group-1")
			.setStartingOffsets(OffsetsInitializer.earliest())
			.setValueOnlyDeserializer(new SimpleStringSchema())
			.build();
		DataStreamSource<String> stream = env.fromSource(
			source, WatermarkStrategy.noWatermarks(), "Kafka Source"
		);
		//stream.print("kafka");

		// 3. Transformations
		// NOTE: anonymous classes (not lambdas) are used on purpose — Flink can
		// lose generic type information for lambda-based functions (especially
		// ones producing Tuple2), which would require explicit .returns() hints.

		// 3-1. Keep only page logs whose page_id is non-null.
		SingleOutputStreamOperator<String> stream31 = stream.filter(
			new FilterFunction<String>() {
				@Override
				public boolean filter(String value) throws Exception {
					return isPageViewLog(value);
				}
			}
		);
		//stream31.print("page");

		// 3-2. Map every qualifying record to a ("page_view", 1) tuple.
		SingleOutputStreamOperator<Tuple2<String, Long>> stream32 = stream31.map(
			new MapFunction<String, Tuple2<String, Long>>() {
				@Override
				public Tuple2<String, Long> map(String value) throws Exception {
					return Tuple2.of("page_view", 1L);
				}
			}
		);

		// 3-3. Key by the constant tag so all tuples land in one group.
		KeyedStream<Tuple2<String, Long>, String> stream33 = stream32.keyBy(
			new KeySelector<Tuple2<String, Long>, String>() {
				@Override
				public String getKey(Tuple2<String, Long> value) throws Exception {
					return value.f0;
				}
			}
		);

		// 3-4. Running sum over field 1 (the count) — emits a new total per record.
		SingleOutputStreamOperator<Tuple2<String, Long>> stream34 = stream33.sum(1);

		// 4. Sink
		stream34.print("result");

		// 5. Trigger execution
		env.execute("RealtimePageViewDemo");
	}

	/**
	 * Returns {@code true} when the raw log line is valid JSON containing a
	 * {@code "page"} object whose {@code "page_id"} field is non-null.
	 *
	 * <p>Malformed records (non-JSON text, null parse result) are treated as
	 * non-matching instead of throwing, so a single bad Kafka message cannot
	 * fail the whole streaming job.
	 *
	 * @param value raw log line from Kafka
	 * @return whether the record is a valid page-view log
	 */
	private static boolean isPageViewLog(String value) {
		try {
			JSONObject jsonObject = JSON.parseObject(value);
			if (jsonObject == null) {
				return false;
			}
			// Single lookup of the "page" sub-object (the original fetched it twice).
			JSONObject page = jsonObject.getJSONObject("page");
			return page != null && page.getString("page_id") != null;
		} catch (Exception ignored) {
			// Best-effort filter: drop unparseable records rather than crash the pipeline.
			return false;
		}
	}

}
