package com.asap.demo.rete;


import com.asap.demo.sourcefunc.MysqlSourceFunction1;
import com.asap.demo.utils.Constants;
import com.asap.demo.utils.Utils;
import com.asap.interf.Action;
import com.asap.rule.StandardEvent;
import com.asap.rule.engine.InferenceEngine;
import com.asap.rule.engine.PatternMatcher;
import com.asap.rule.orm.DbFetcher;
import com.asap.rule.util.PropTransformMap;
import com.asap.rule.util.RuleReader;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.*;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.typeutils.ListTypeInfo;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nullable;
import java.sql.Timestamp;
import java.util.*;

public class ReteDemo2 {

	private static final Logger logger = LoggerFactory.getLogger(ReteDemo2.class);

	/**
	 * Entry point: wires a Kafka stream of {@link StandardEvent}s to a rule
	 * engine whose rule set is loaded from MySQL and distributed to the event
	 * stream via Flink broadcast state. Events that arrive before the first
	 * rule broadcast are buffered and replayed once an engine is available;
	 * every matched event is printed and emitted downstream.
	 *
	 * @param args unused
	 * @throws Exception if the Flink job fails to build or execute
	 */
	public static void main(String[] args) throws Exception {

		final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
		env.setParallelism(1);
		env.enableCheckpointing(5000);  // checkpoint every 5000 ms
		env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

		Properties browseProperties = new Properties();
		browseProperties.put("bootstrap.servers", "192.168.1.25:9093");
		browseProperties.put("group.id", "temporal");
		browseProperties.put("auto.offset.reset", "latest");
		PropTransformMap.getInstance().readConfigMap("/home/asap/wbh/conf/cfg.properties");

		// JDBC / connection-pool settings for the rule database.
		Map<String, String> configMap = new HashMap<String, String>();
		configMap.put(Constants.DB_JDBC_USER, "root");
		configMap.put(Constants.DB_JDBC_PASSWD, "1qazXSW@3edc");
		configMap.put(Constants.DB_JDBC_URL, "jdbc:mysql://192.168.1.239:3306/SSA?useUnicode=true&characterEncoding=utf-8");
		configMap.put(Constants.DB_JDBC_DRIVER, "com.mysql.jdbc.Driver");
		configMap.put(Constants.INITAL_POOL_SIZE, "10");
		configMap.put(Constants.MIN_POOL_SIZE, "5");
		configMap.put(Constants.MAX_STATE_ELEMENTS, "100");
		// NOTE(review): the original put MAX_IDLE_TIME twice ("50", then "60");
		// the second value won, so "60" is kept here. Confirm whether the first
		// put was meant to use a different constant.
		configMap.put(Constants.MAX_IDLE_TIME, "60");
		DbFetcher dbFetcher = new DbFetcher(configMap);
		List<String> listRule = RuleReader.readRules(dbFetcher);
		System.out.println("ListRule::" + listRule.size());

		final String RULE_SBROAD_CAST_STATE = "RulesBroadcastState";

		// 1. Rule-update stream read from MySQL.
		DataStream<List<String>> conf = env.addSource(new MysqlSourceFunction1(dbFetcher));

		// 2. Descriptor for the broadcast map state that holds the rule list.
		MapStateDescriptor<String, List<String>> ruleStateDescriptor = new MapStateDescriptor<>(RULE_SBROAD_CAST_STATE
				, BasicTypeInfo.STRING_TYPE_INFO
				, new ListTypeInfo<>(String.class));
		// 3. Broadcast the rule stream to every parallel instance.
		final BroadcastStream<List<String>> confBroadcast = conf.broadcast(ruleStateDescriptor);

		DataStream<String> dataStream = env
				.addSource(new FlinkKafkaConsumer<>(
						"flink_pressure_test1",
						new SimpleStringSchema(),
						browseProperties
				));

		DataStream<StandardEvent> kafkaData = dataStream
				.map(new MapFunction<String, StandardEvent>() {
					@Override
					public StandardEvent map(String value) throws Exception {
						return StandardEvent.parse(value);
					}
				})
				.assignTimestampsAndWatermarks(
						new AssignerWithPeriodicWatermarks<StandardEvent>() {
							Long currentMaxTimestamp = 0L;
							Long maxDelayTime = 5000L;

							@Override
							public long extractTimestamp(StandardEvent s, long l) {
								long ts = Timestamp.valueOf(Utils.transforDate(s.getField("CREATE_TIME"))).getTime();
								// FIX: track the maximum timestamp seen instead of
								// overwriting with the latest one, so out-of-order
								// events cannot pull the watermark backwards.
								currentMaxTimestamp = Math.max(currentMaxTimestamp, ts);
								return ts;
							}

							@Nullable
							@Override
							public Watermark getCurrentWatermark() {
								// Allow events up to maxDelayTime ms late.
								return new Watermark(currentMaxTimestamp - maxDelayTime);
							}
						}
				)
				.connect(confBroadcast)
				.process(
						new BroadcastProcessFunction<StandardEvent, List<String>, StandardEvent>() {
							InferenceEngine engine = null;
							// true until a one-shot ("1"-marked) rule list has been compiled.
							boolean flag = true;
							MapStateDescriptor<String, List<String>> ruleStateDescriptor = new MapStateDescriptor<>(RULE_SBROAD_CAST_STATE
									, BasicTypeInfo.STRING_TYPE_INFO
									, new ListTypeInfo<>(String.class));
							// Events buffered while no rules have been broadcast yet.
							private transient List<StandardEvent> listState;

							/**
							 * Runs once per task instance; initializes the event buffer.
							 *
							 * @param parameters Flink configuration (unused here)
							 * @throws Exception propagated from the superclass
							 */
							@Override
							public void open(Configuration parameters) throws Exception {
								super.open(parameters);
								listState = new ArrayList<>();
							}

							/**
							 * Matches each event against the current engine. Events seen
							 * before any rules are broadcast are buffered and replayed
							 * once an engine exists.
							 *
							 * @param standardEvent   the incoming event
							 * @param readOnlyContext read-only access to broadcast state
							 * @param collector       sink for matched events
							 * @throws Exception on state-access failure
							 */
							@Override
							public void processElement(StandardEvent standardEvent, ReadOnlyContext readOnlyContext, Collector<StandardEvent> collector) throws Exception {
								List<String> list =
										readOnlyContext.getBroadcastState(ruleStateDescriptor).get(RULE_SBROAD_CAST_STATE);
								if (list == null) {
									// No rules broadcast yet: buffer for later replay.
									listState.add(standardEvent);
									logger.info("RulesBroadcastState is null..............");
									return;
								}

								// The first element of a freshly broadcast list is a mode
								// marker: "1" = compile once, "0" = recompile on update.
								// FIX: capture the marker BEFORE removing it; the original
								// re-read list.get(0) after the removal, which compared a
								// rule string (so `flag` was never cleared) and threw
								// IndexOutOfBoundsException when only the marker was left.
								String head = list.isEmpty() ? null : list.get(0);
								if (head != null && ((flag && head.equals("1")) || head.equals("0"))) {
									// Removing the marker mutates the shared broadcast
									// list, which is what stops later elements from
									// recompiling the engine again.
									list.remove(0);
									engine = InferenceEngine.compile(RuleReader.parseRules(list));
									logger.info("action update.....:" + list.size() + ":" + flag);
									if (flag && head.equals("1")) {
										flag = false;
									}
								}

								if (engine != null) {
									logger.info("listState size:.............." + listState.size());
									// Replay events that arrived before the rules did.
									for (StandardEvent buffered : listState) {
										match(buffered, collector);
									}
									match(standardEvent, collector);
									listState.clear();
								} else {
									logger.info("processElement engine is null.....:");
								}
							}

							/** Emits the event once per action of every matching rule. */
							private void match(StandardEvent standardEvent, Collector<StandardEvent> collector) {
								PatternMatcher matcher = engine.matcher(standardEvent);
								if (matcher.find()) {
									List<Action> actions = matcher.getActions();
									for (Action action : actions) {
										System.out.println("rule_id:" + action.getRuleId() + ":::::" + standardEvent);
										collector.collect(standardEvent);
									}
								} else {
									logger.info("no matcher:" + standardEvent);
								}
							}

							/**
							 * Stores each broadcast rule list in broadcast state,
							 * replacing any previously stored list.
							 *
							 * @param strings   the new rule list (marker + rules)
							 * @param context   writable access to broadcast state
							 * @param collector unused
							 * @throws Exception on state-access failure
							 */
							@Override
							public void processBroadcastElement(List<String> strings, Context context, Collector<StandardEvent> collector) throws Exception {
								BroadcastState<String, List<String>> broadcastState = context.getBroadcastState(ruleStateDescriptor);
								logger.info("processBroadcastElement.....:" + strings.size());
								if (broadcastState.contains(RULE_SBROAD_CAST_STATE)) {
									List<String> oldList = broadcastState.get(RULE_SBROAD_CAST_STATE);
									logger.info("get State:" + oldList.size() + "  replaced with State:" + strings.size());
								} else {
									logger.info("do not find old State, put first counterState {}", strings.size());
								}
								broadcastState.put(RULE_SBROAD_CAST_STATE, strings);
							}
						}
				);
		kafkaData.print();
		env.execute("Broadcast test kafka");
	}
}
