package com.asap.demo.rete;


import com.asap.demo.sourcefunc.MysqlSourceFunction;
import com.asap.demo.sourcefunc.MysqlSourceFunction1;
import com.asap.demo.utils.Constants;
import com.asap.demo.utils.Utils;
import com.asap.interf.Action;
import com.asap.rule.StandardEvent;
import com.asap.rule.engine.InferenceEngine;
import com.asap.rule.engine.PatternMatcher;
import com.asap.rule.orm.DbFetcher;
import com.asap.rule.util.PropTransformMap;
import com.asap.rule.util.RuleReader;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.java.functions.FormattingMapper;
import org.apache.flink.api.java.typeutils.ListTypeInfo;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Timestamp;
import java.util.*;

public class ReteDemo1 {

	private static final Logger logger = LoggerFactory.getLogger(ReteDemo1.class);

	/** Broadcast-state key under which the single {@link InferenceEngine} is stored. */
	private static final String ENGINE_STATE_KEY = "RulesBroadcastState";

	/**
	 * Entry point of the rule-matching demo job.
	 *
	 * <p>Pipeline: rule configuration is read from MySQL and broadcast as an
	 * {@link InferenceEngine}; Kafka events are parsed into {@link StandardEvent}s,
	 * matched against the broadcast rules, and matched events (with their actions
	 * attached) are logged downstream.
	 *
	 * @param args unused
	 * @throws Exception if job construction or execution fails
	 */
	public static void main(String[] args) throws Exception {

		final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(1);
		env.enableCheckpointing(5000);  // checkpoint every 5000 ms
		env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

		Properties browseProperties = new Properties();
		browseProperties.put("bootstrap.servers", "192.168.1.25:9093");
		browseProperties.put("group.id", "temporal");
		browseProperties.put("auto.offset.reset", "latest");
		Map<String, String> configMap = PropTransformMap.getInstance().readConfigMap("/home/asap/wbh/cfg.properties");
		DbFetcher dbFetcher = new DbFetcher(configMap);
		// 1. Read the rule configuration from MySQL as a stream of engines.
		DataStream<InferenceEngine> conf = env.addSource(new MysqlSourceFunction(dbFetcher));
		// 2. Descriptor for the broadcast state that holds the current engine.
		MapStateDescriptor<String, InferenceEngine> ruleStateDescriptor = new MapStateDescriptor<>(ENGINE_STATE_KEY
				, String.class
				, InferenceEngine.class);
		// 3. Broadcast the configuration stream to every event-processing task.
		final BroadcastStream<InferenceEngine> confBroadcast = conf.broadcast(ruleStateDescriptor);

		DataStream<StandardEvent> kafkaData = env
				.addSource(new FlinkKafkaConsumer<>(
						"flink_pressure_test",
						new SimpleStringSchema(),
						browseProperties
				))
				// Parse each record exactly once, then derive the event-time timestamp
				// from the parsed event. (Previously every record was parsed twice:
				// once in the timestamp extractor and once more in the map.)
				.map(new MapFunction<String, StandardEvent>() {
					@Override
					public StandardEvent map(String value) throws Exception {
						return StandardEvent.parse(value);
					}
				})
				.assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor<StandardEvent>(Time.minutes(1)) {
					@Override
					public long extractTimestamp(StandardEvent element) {
						return Timestamp.valueOf(Utils.transforDate(element.getField("CREATE_TIME"))).getTime();
					}
				})
				.connect(confBroadcast)
				.process(
						new BroadcastProcessFunction<StandardEvent, InferenceEngine, StandardEvent>() {
							@Override
							public void processElement(StandardEvent standardEvent, ReadOnlyContext readOnlyContext, Collector<StandardEvent> collector) throws Exception {
								// Wait until the first broadcast engine has arrived.
								// NOTE(review): this blocks the task thread (and with it the
								// checkpoint barrier) — tolerable only during job startup while
								// the initial rule set is still in flight.
								InferenceEngine engine = readOnlyContext.getBroadcastState(ruleStateDescriptor).get(ENGINE_STATE_KEY);
								while (engine == null) {
									Thread.sleep(1000);
									engine = readOnlyContext.getBroadcastState(ruleStateDescriptor).get(ENGINE_STATE_KEY);
								}

								PatternMatcher matcher = engine.matcher(standardEvent);
								if (matcher.find()) {
									// Attach the full action list once and emit the event once.
									// (Previously the same event was collected once per action,
									// producing duplicate emissions downstream.)
									standardEvent.setActions(matcher.getActions());
									collector.collect(standardEvent);
								} else {
									logger.debug("no rule matched for event {}", standardEvent);
								}
							}

							@Override
							public void processBroadcastElement(InferenceEngine inferenceEngine, Context context, Collector<StandardEvent> collector) throws Exception {
								// Replace the current engine; every subsequent event sees the new rules.
								BroadcastState<String, InferenceEngine> broadcastState = context.getBroadcastState(ruleStateDescriptor);
								broadcastState.put(ENGINE_STATE_KEY, inferenceEngine);
							}
						}
				).broadcast();
		// Sink: log every matched event together with the id of each triggered rule.
		kafkaData.map(new MapFunction<StandardEvent, StandardEvent>() {
			@Override
			public StandardEvent map(StandardEvent value) throws Exception {
				for (Action action : value.getActions()) {
					logger.info("rule_id:{}:::::{}", action.getRuleId(), value);
				}
				return value;
			}
		});
		env.execute("Broadcast test kafka");
	}
}
