package cep;

import broadcast.StreamKafkaJoinPostgres;
import com.asap.demo.model.StandardEvent;
import com.asap.demo.utils.Utils;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternFlatSelectFunction;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.RichIterativeCondition;
import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.test.util.MiniClusterWithClientResource;
import org.apache.flink.util.Collector;
import org.junit.ClassRule;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Timestamp;
import java.util.List;
import java.util.Map;
import java.util.Properties;

public class RuleChainDemo {

	private static final Logger logger = LoggerFactory.getLogger(RuleChainDemo.class);

	/**
	 * Shared mini Flink cluster for this test class: 2 task managers with 3 slots
	 * each, giving the job graph enough parallelism headroom to deploy.
	 */
	@ClassRule
	public static MiniClusterWithClientResource flinkCluster =
			new MiniClusterWithClientResource(
					new MiniClusterResourceConfiguration.Builder()
							.setNumberSlotsPerTaskManager(3)
							.setNumberTaskManagers(2)
							.build());

	/**
	 * Demonstrates a two-step CEP rule chain over a Kafka stream of
	 * {@link StandardEvent}s: an event whose EVENT_THREE_TYPE_DESC is
	 * "暴力破解失败" strictly followed ({@code next}) by an event whose
	 * EVENT_NAME is "爬虫". For each match, the last event of the first step
	 * is emitted downstream and printed.
	 *
	 * @throws Exception if the Flink job fails to build or execute
	 */
	@Test
	public void ruleChainDemo() throws Exception {
		//ParameterTool parameterTool = ParameterTool.fromPropertiesFile("/data/asap/flink-1.11.2/custom_conf/application.properties");
		Properties browseProperties = new Properties();
		browseProperties.put("bootstrap.servers", "192.168.1.25:9093");
		browseProperties.put("group.id", "temporal");
		browseProperties.put("auto.offset.reset", "latest");

		final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

		// Parse each Kafka record exactly once, then assign event-time timestamps from
		// the already-parsed event. (The original extracted the timestamp from the raw
		// String before mapping, which ran StandardEvent.parse twice per record.)
		SingleOutputStreamOperator<StandardEvent> outputStream = env
				.addSource(new FlinkKafkaConsumer<>(
						"flink_pressure_test",
						new SimpleStringSchema(),
						browseProperties
				))
				.map(new MapFunction<String, StandardEvent>() {
					@Override
					public StandardEvent map(String value) throws Exception {
						return StandardEvent.parse(value);
					}
				})
				.assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor<StandardEvent>(Time.minutes(1)) {
					@Override
					public long extractTimestamp(StandardEvent element) {
						// Event time comes from the CREATE_TIME field; up to 1 minute of
						// out-of-orderness is tolerated by the extractor.
						return Timestamp.valueOf(Utils.transforDate(element.getField("CREATE_TIME"))).getTime();
					}
				});

		// Rule chain: a brute-force-failure event strictly followed by a crawler event.
		Pattern<StandardEvent, StandardEvent> loginPattern = Pattern.<StandardEvent>begin("first")
				.where(new RichIterativeCondition<StandardEvent>() {
					@Override
					public boolean filter(StandardEvent event, Context<StandardEvent> ctx) throws Exception {
						// Literal-first equals is null-safe: a missing field no longer throws NPE.
						return "暴力破解失败".equals(event.getField("EVENT_THREE_TYPE_DESC"));
					}
				})
				.next("second")
				.where(new RichIterativeCondition<StandardEvent>() {
					@Override
					public boolean filter(StandardEvent event, Context<StandardEvent> ctx) throws Exception {
						return "爬虫".equals(event.getField("EVENT_NAME"));
					}
				});

		// NOTE(review): the pattern is evaluated in processing time, so the event-time
		// watermarks assigned above are not consumed by CEP — confirm which time
		// semantics is actually intended before relying on this in production.
		PatternStream<StandardEvent> loginStream = CEP.pattern(outputStream, loginPattern).inProcessingTime();

		SingleOutputStreamOperator<StandardEvent> loginFailStream = loginStream.flatSelect(
				new PatternFlatSelectFunction<StandardEvent, StandardEvent>() {
					@Override
					public void flatSelect(Map<String, List<StandardEvent>> pattern, Collector<StandardEvent> out) throws Exception {
						// Emit the last event matched by the "first" step of the chain.
						List<StandardEvent> first = pattern.get("first");
						StandardEvent standardEvent = first.get(first.size() - 1);
						out.collect(standardEvent);
						// Parameterized logging: no concatenation cost when DEBUG is disabled.
						logger.debug("EVENT_NAME:{}", standardEvent.getField("EVENT_NAME"));
					}
				});
		loginFailStream.print();
		env.execute("Blink RuleChainDemo demo");
	}
}
