package com.asap.demo.rete;


import com.asap.demo.sourcefunc.MysqlSourceFunction1;
import com.asap.demo.utils.Constants;
import com.asap.demo.utils.Utils;
import com.asap.interf.Action;
import com.asap.rule.StandardEvent;
import com.asap.rule.engine.InferenceEngine;
import com.asap.rule.engine.PatternMatcher;
import com.asap.rule.orm.DbFetcher;
import com.asap.rule.util.PropTransformMap;
import com.asap.rule.util.RuleReader;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.typeutils.ListTypeInfo;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nullable;
import java.io.IOException;
import java.sql.Timestamp;
import java.util.*;

public class ReteDemo3 {

	private static final Logger logger = LoggerFactory.getLogger(ReteDemo3.class);

	/**
	 * Flink streaming job: consumes {@code StandardEvent}s from Kafka, broadcasts
	 * rule definitions loaded from MySQL, compiles them into an
	 * {@link InferenceEngine}, and matches every event against the current rule
	 * set, emitting the event once per fired rule action.
	 *
	 * @param args ignored
	 * @throws Exception if job construction or execution fails
	 */
	public static void main(String[] args) throws Exception {

		final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(1);
		env.enableCheckpointing(5000);  // checkpoint every 5000 ms
		env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

		// Kafka consumer configuration.
		Properties browseProperties = new Properties();
		browseProperties.put("bootstrap.servers", "192.168.1.25:9093");
		browseProperties.put("group.id", "temporal");
		browseProperties.put("auto.offset.reset", "latest");
		PropTransformMap.getInstance().readConfigMap("/home/asap/wbh/conf/cfg.properties");

		// JDBC / connection-pool configuration for the rule store.
		// NOTE(review): credentials and endpoints are hard-coded; move them to
		// external configuration and keep secrets out of source control.
		Map<String, String> configMap = new HashMap<>();
		configMap.put(Constants.DB_JDBC_USER, "root");
		configMap.put(Constants.DB_JDBC_PASSWD, "1qazXSW@3edc");
		configMap.put(Constants.DB_JDBC_URL, "jdbc:mysql://192.168.1.25:3306/SSA?useUnicode=true&characterEncoding=utf-8");
		configMap.put(Constants.DB_JDBC_DRIVER, "com.mysql.jdbc.Driver");
		configMap.put(Constants.INITAL_POOL_SIZE, "10");
		configMap.put(Constants.MIN_POOL_SIZE, "5");
		configMap.put(Constants.MAX_STATE_ELEMENTS, "100");
		// BUG FIX: MAX_IDLE_TIME was previously put twice ("50", then "60"), so the
		// first value was silently overwritten. Only the effective value is kept.
		// TODO confirm whether the first put was meant to be a different constant
		// (e.g. a max pool size).
		configMap.put(Constants.MAX_IDLE_TIME, "60");
		DbFetcher dbFetcher = new DbFetcher(configMap);
		List<String> listRule = RuleReader.readRules(dbFetcher);
		System.out.println("ListRule::" + listRule.size());

		final String RULE_SBROAD_CAST_STATE = "RulesBroadcastState";

		// 1. Read rule/config updates from MySQL.
		DataStream<List<String>> conf = env.addSource(new MysqlSourceFunction1(dbFetcher));

		// 2. Descriptor for the broadcast state that carries the rule list.
		MapStateDescriptor<String, List<String>> ruleStateDescriptor = new MapStateDescriptor<>(RULE_SBROAD_CAST_STATE
				, BasicTypeInfo.STRING_TYPE_INFO
				, new ListTypeInfo<>(String.class));
		// 3. Broadcast the configuration stream to all downstream tasks.
		final BroadcastStream<List<String>> confBroadcast = conf.broadcast(ruleStateDescriptor);

		DataStream<String> dataStream = env
				.addSource(new FlinkKafkaConsumer<>(
						"flink_pressure_test2",
						new SimpleStringSchema(),
						browseProperties
				));

		DataStream<StandardEvent> kafkaData = dataStream
				.map(new MapFunction<String, StandardEvent>() {
					@Override
					public StandardEvent map(String value) throws Exception {
						return StandardEvent.parse(value);
					}
				})
				.assignTimestampsAndWatermarks(
						new AssignerWithPeriodicWatermarks<StandardEvent>() {
							// Highest event timestamp seen so far (primitive to avoid boxing).
							long currentMaxTimestamp = 0L;
							// Allowed out-of-orderness in milliseconds.
							final long maxDelayTime = 5000L;

							@Override
							public long extractTimestamp(StandardEvent s, long previousElementTimestamp) {
								long eventTime = Timestamp.valueOf(Utils.transforDate(s.getField("CREATE_TIME"))).getTime();
								// BUG FIX: keep the running maximum instead of overwriting
								// with the latest timestamp, so the watermark never moves
								// backwards when events arrive out of order.
								currentMaxTimestamp = Math.max(currentMaxTimestamp, eventTime);
								return eventTime;
							}

							@Nullable
							@Override
							public Watermark getCurrentWatermark() {
								// Watermark trails the max seen timestamp by the allowed delay.
								return new Watermark(currentMaxTimestamp - maxDelayTime);
							}
						}
				)
				// All events share one constant key so a single keyed instance sees
				// the whole stream. BUG FIX: the key is now a String, matching the
				// String key type declared on the KeyedBroadcastProcessFunction
				// below (it was an Integer before, which would throw
				// ClassCastException if getCurrentKey() were ever used).
				.keyBy(new KeySelector<StandardEvent, String>() {
					@Override
					public String getKey(StandardEvent event) throws Exception {
						return "1";
					}
				})
				.connect(confBroadcast)
				.process(
						new KeyedBroadcastProcessFunction<String, StandardEvent, List<String>, StandardEvent>() {
							// Events buffered while no rules have been broadcast yet.
							private transient ValueState<List<StandardEvent>> listState;
							// true until a one-shot ("1"-flagged) rule set has been compiled.
							private transient ValueState<Boolean> runningFlagState;
							// NOTE(review): registered but never read or written; kept only
							// for state-compatibility with existing checkpoints.
							private transient ValueState<InferenceEngine> engineState;
							MapStateDescriptor<String, List<String>> ruleStateDescriptor = new MapStateDescriptor<>(RULE_SBROAD_CAST_STATE
									, BasicTypeInfo.STRING_TYPE_INFO
									, new ListTypeInfo<>(String.class));
							// Compiled rule engine; rebuilt when the rule set changes.
							// NOTE(review): not checkpointed, so it is lost on restore
							// until the next compile is triggered.
							InferenceEngine engine;

							/**
							 * Runs once per task instance; registers the keyed state handles.
							 *
							 * @param parameters runtime configuration
							 * @throws Exception if state registration fails
							 */
							@Override
							public void open(Configuration parameters) throws Exception {
								super.open(parameters);
								ValueStateDescriptor<List<StandardEvent>> recentOperatorsDescriptor = new ValueStateDescriptor<List<StandardEvent>>(
										"recent-operator",
										TypeInformation.of(new TypeHint<List<StandardEvent>>() {
										}));

								// State name "runingFlag" kept as-is (renaming would break
								// restore from existing checkpoints).
								ValueStateDescriptor<Boolean> runningFlagDescriptor = new ValueStateDescriptor<Boolean>(
										"runingFlag",
										Boolean.class);

								ValueStateDescriptor<InferenceEngine> engineDescriptor = new ValueStateDescriptor<InferenceEngine>(
										"runingFlag1",
										InferenceEngine.class);

								listState = getRuntimeContext().getState(recentOperatorsDescriptor);
								runningFlagState = getRuntimeContext().getState(runningFlagDescriptor);
								engineState = getRuntimeContext().getState(engineDescriptor);
								logger.info("KeyedBroadcastProcessFunction open");
							}

							/**
							 * Matches one event against the compiled rules. Events arriving
							 * before any rule broadcast are buffered in {@code listState}
							 * and replayed once an engine exists.
							 */
							@Override
							public void processElement(StandardEvent standardEvent, ReadOnlyContext readOnlyContext, Collector<StandardEvent> collector) throws Exception {

								List<String> list = readOnlyContext.getBroadcastState(ruleStateDescriptor).get(RULE_SBROAD_CAST_STATE);
								if (list == null) {
									// No rules broadcast yet: buffer the event for later replay.
									logger.info("RulesBroadcastState is null..............");
									List<StandardEvent> lst = listState.value();
									if (lst == null) {
										lst = new ArrayList<>();
									}
									lst.add(standardEvent);
									listState.update(lst);
									return;
								}
								// First event for this key: mark the one-shot compile as pending.
								if (runningFlagState.value() == null) {
									logger.info("runingFlagState.value() == null");
									runningFlagState.update(true);
								}
								// Element 0 of the broadcast list is a flag:
								// "1" = compile once, "0" = (re)compile now.
								if ((runningFlagState.value() && list.get(0).equals("1")) || list.get(0).equals("0")) {
									logger.info("action update.....:{}:{}:{}", list.size(), runningFlagState.value(), list.get(0));
									String flag = list.get(0);
									// NOTE(review): this mutates the list stored inside the
									// broadcast state (the flag is consumed in place); that is
									// only safe at parallelism 1 — TODO rework if the job is
									// ever scaled out.
									list.remove(0);
									engine = InferenceEngine.compile(RuleReader.parseRules(list));
									if (runningFlagState.value() && flag.equals("1")) {
										runningFlagState.update(false);
									}
								}

								if (engine != null) {
									// Replay events buffered before the first rule broadcast,
									// then match the current event.
									List<StandardEvent> listTmp = listState.value();
									if (listTmp != null) {
										for (StandardEvent standardEventTmp : listTmp) {
											logger.info("listState.....:{}", standardEventTmp);
											match(standardEventTmp, collector);
										}
										listState.clear();
									}
									match(standardEvent, collector);
								} else {
									logger.info("processElement engine is null.....:");
								}
							}

							/**
							 * Runs the matcher on one event and emits the event once per
							 * fired rule action; logs and drops events that match no rule.
							 */
							private void match(StandardEvent standardEvent, Collector<StandardEvent> collector) throws IOException {
								PatternMatcher matcher = engine.matcher(standardEvent);
								if (matcher.find()) {
									for (Action action : matcher.getActions()) {
										System.out.println("rule_id:" + action.getRuleId() + ":::::" + standardEvent);
										collector.collect(standardEvent);
									}
								} else {
									logger.info("no matcher:{}", standardEvent);
								}
							}

							/**
							 * Stores the latest broadcast rule list, replacing any
							 * previously stored list under the same key.
							 */
							@Override
							public void processBroadcastElement(List<String> strings, Context context, Collector<StandardEvent> collector) throws Exception {
								BroadcastState<String, List<String>> broadcastState = context.getBroadcastState(ruleStateDescriptor);
								logger.info("processBroadcastElement.....:{}", strings.size());
								if (broadcastState.contains(RULE_SBROAD_CAST_STATE)) {
									List<String> oldList = broadcastState.get(RULE_SBROAD_CAST_STATE);
									logger.info("get State:{}  replaced with State:{}", oldList.size(), strings.size());
								} else {
									logger.info("do not find old State, put first counterState {}", strings.size());
								}
								broadcastState.put(RULE_SBROAD_CAST_STATE, strings);
							}
						}
				);
		//kafkaData.print();
		env.execute("Broadcast test kafka");
	}
}
