package net.bwie.flink;

import com.alibaba.fastjson.JSON;
import lombok.SneakyThrows;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.functions.PatternProcessFunction;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.util.Collector;

import java.text.SimpleDateFormat;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.List;
import java.util.Map;

/**
 * Flink CEP (complex event processing) example:
 *   [Anomaly detection: the same user logs in 5 times within 5 minutes]
 *      Simulates behavior logs produced as DiDi users log in to / operate the app (mini-program):
 *      du1,login,2025-10-27 10:00:01
 *      du1,logout,2025-10-27 10:00:11
 *      du1,login,2025-10-27 10:00:51
 *      du1,login,2025-10-27 10:01:21
 *      du1,logout,2025-10-27 10:01:51
 *      du1,login,2025-10-27 10:03:21
 *      du1,logout,2025-10-27 10:04:11
 *      du1,login,2025-10-27 10:04:51
 *      du1,login,2025-10-27 10:05:11
 *
 *    Official documentation:
 *          https://nightlies.apache.org/flink/flink-docs-release-1.17/docs/libs/cep/
 * @author xuanyu
 * @date 2025/10/27
 */
public class _03StreamDidiCepDemo {

	/** Kafka bootstrap servers, shared by the source and the sink (single source of truth). */
	private static final String BOOTSTRAP_SERVERS = "node101:9092,node102:9092,node103:9092";

	/**
	 * Parser for the log timestamp ("yyyy-MM-dd HH:mm:ss").
	 * DateTimeFormatter is immutable and thread-safe, unlike the legacy SimpleDateFormat,
	 * so it can safely be cached as a static constant.
	 */
	private static final DateTimeFormatter LOG_TIME_FORMAT =
		DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

	/**
	 * Entry point: reads behavior logs from Kafka, detects users who log in 5 times
	 * within 5 minutes via Flink CEP, and writes the matched events back to Kafka.
	 *
	 * @param args unused
	 * @throws Exception if the Flink job fails to build or execute
	 */
	public static void main(String[] args) throws Exception{
		// 1. Execution environment
		StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(1);

		// 2. Source: consume raw behavior-log lines from the "didi-log" topic
		/*
			Start the cluster services, create the topic, then produce test data:
				zk.sh start
				kfk.sh start
			Kafka console producer:
				[bwie@node101 ~]$ /opt/module/kafka/bin/kafka-console-producer.sh --broker-list node101:9092,node102:9092,node103:9092 --topic didi-log
		 */
		KafkaSource<String> source = KafkaSource.<String>builder()
			.setBootstrapServers(BOOTSTRAP_SERVERS)
			.setTopics("didi-log")
			.setGroupId("didi-cep-01")
			.setStartingOffsets(OffsetsInitializer.earliest())
			.setValueOnlyDeserializer(new SimpleStringSchema())
			.build();
		DataStreamSource<String> stream = env.fromSource(
			source, WatermarkStrategy.noWatermarks(), "Kafka Source"
		);
		//stream.print("kafka");

		// 3. Transformations
		// 3.0 Parse each CSV line into a DidiLog bean
		SingleOutputStreamOperator<DidiLog> stream30 = stream.map(
			new MapFunction<String, DidiLog>() {
				@Override
				public DidiLog map(String value) throws Exception {
					// Input line, e.g.: du1,login,2025-10-27 10:03:21
					String[] split = value.split(",");
					// Fields: [0] user id, [1] behavior type, [2] log time
					return new DidiLog(split[0], split[1], split[2]);
				}
			}
		);


		// 3.1 Assign event time from the log timestamp (zero out-of-orderness tolerated)
		SingleOutputStreamOperator<DidiLog> stream31 = stream30.assignTimestampsAndWatermarks(
			WatermarkStrategy
				.<DidiLog>forBoundedOutOfOrderness(Duration.ofSeconds(0))
				.withTimestampAssigner(new SerializableTimestampAssigner<DidiLog>() {
					@Override
					public long extractTimestamp(DidiLog element, long recordTimestamp) {
						// Parse in the system default zone (mirrors SimpleDateFormat's
						// behavior) and return the event time as epoch milliseconds.
						return LocalDateTime.parse(element.getLogTime(), LOG_TIME_FORMAT)
							.atZone(ZoneId.systemDefault())
							.toInstant()
							.toEpochMilli();
					}
				})
		);

		// 3.2 Key by user so each user's events are matched independently
		KeyedStream<DidiLog, String> stream32 = stream31.keyBy(log -> log.getDidiUser());

		// 3.3 Pattern: one "login" followed by 4 more "login" events within 5 minutes
		//     (5 logins total from the same user triggers the anomaly)
		Pattern<DidiLog, DidiLog> pattern = Pattern
			.<DidiLog>begin("first").where(
				new SimpleCondition<DidiLog>() {
					@Override
					public boolean filter(DidiLog value) throws Exception {
						return "login".equals(value.getBehaviorType());
					}
				}
			)
			.followedBy("login").where(
				new SimpleCondition<DidiLog>() {
					@Override
					public boolean filter(DidiLog value) throws Exception {
						return "login".equals(value.getBehaviorType());
					}
				}
			)
			.times(4)
			.within(Time.minutes(5));

		// 3.4 Apply the pattern to the keyed stream
		PatternStream<DidiLog> patternStream = CEP.pattern(stream32, pattern);

		// 3.5 Extract matched events as JSON strings
		SingleOutputStreamOperator<String> stream35 = patternStream.process(
			new PatternProcessFunction<DidiLog, String>() {
				@Override
				public void processMatch(Map<String, List<DidiLog>> match,
				                         Context ctx,
				                         Collector<String> out) throws Exception {
					/*
						`match` maps each pattern name to the events it matched:
							key:   the pattern name ("first" and "login" above)
							value: the events matching that pattern — possibly several,
							       hence a List
					 */
					// Events matched by the "first" pattern
					List<DidiLog> firstListLog = match.get("first");
					for (DidiLog firstLog : firstListLog) {
						out.collect(JSON.toJSONString(firstLog));
					}

					// Events matched by the "login" pattern
					List<DidiLog> loginListLog = match.get("login");
					for (DidiLog loginLog : loginListLog) {
						out.collect(JSON.toJSONString(loginLog));
					}
				}
			}
		);

		// 4. Sink: write matched events to the "login-log" topic
		//stream35.print();
		KafkaSink<String> sink = KafkaSink.<String>builder()
			.setBootstrapServers(BOOTSTRAP_SERVERS)
			.setRecordSerializer(KafkaRecordSerializationSchema.builder()
				.setTopic("login-log")
				.setValueSerializationSchema(new SimpleStringSchema())
				.build()
			)
			.setDeliveryGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
			.build();
		stream35.sinkTo(sink);

		// 5. Trigger execution
		env.execute("StreamDidiCepDemo") ;
	}

}
