package com.atguigu.flink.chapter10;

import com.atguigu.flink.bean.LoginEvent;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternSelectFunction;
import org.apache.flink.cep.PatternStream;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.time.Time;

import java.time.Duration;
import java.util.List;
import java.util.Map;

/**
 * Malicious-login detection with Flink CEP: flags a user who produces two
 * strictly consecutive "fail" login events within 2 seconds (event time,
 * with 3 seconds of bounded out-of-orderness).
 *
 * <p>Input lines are CSV: {@code userId,ip,eventType,eventTime}.
 */
public class Flink02_Project_Spite_Login_1 {
    public static void main(String[] args) throws Exception {
        Configuration configuration = new Configuration();
        // Web UI port. BUGFIX: the key was misspelled "rest.prot"; the correct
        // option name is "rest.port", otherwise the setting is silently ignored.
        configuration.setInteger("rest.port", 10000);
        // BUGFIX: the configuration must actually be handed to the environment;
        // the no-arg getExecutionEnvironment() never saw the custom port.
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.getExecutionEnvironment(configuration);
        env.setParallelism(1);

        // 1. Source: parse CSV lines into LoginEvent, assign event-time
        //    timestamps/watermarks, and key by user id so the pattern is
        //    matched independently per user.
        KeyedStream<LoginEvent, Long> stream = env
                //.readTextFile("input/LoginLog.csv")
                .socketTextStream("hadoop162", 9999)
                .map(line -> {
                    String[] datas = line.split(",");
                    return new LoginEvent(
                            Long.valueOf(datas[0]),
                            datas[1],
                            datas[2],
                            Long.valueOf(datas[3])
                    );
                })
                .assignTimestampsAndWatermarks(WatermarkStrategy
                        .<LoginEvent>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                        .withTimestampAssigner((log, es) -> log.getEventTime())
                )
                .keyBy(LoginEvent::getUserId);

        // 2. Define the pattern: two consecutive "fail" events within 2 seconds.
        Pattern<LoginEvent, LoginEvent> pattern = Pattern
                .<LoginEvent>begin("fail")
                .where(new SimpleCondition<LoginEvent>() {
                    @Override
                    public boolean filter(LoginEvent value) throws Exception {
                        return "fail".equals(value.getEventType());
                    }
                })
                .times(2)
                .consecutive()
                .within(Time.seconds(2));

        // 3. Apply the pattern to the keyed stream.
        PatternStream<LoginEvent> patternStream = CEP.pattern(stream, pattern);

        // 4. Select matched sequences and report the offending user.
        patternStream.select(new PatternSelectFunction<LoginEvent, String>() {
            @Override
            public String select(Map<String, List<LoginEvent>> map) throws Exception {
                LoginEvent event = map.get("fail").get(0);
                return event.getUserId() + "在进行恶意登录";
            }
        }).print();

        // NOTE(review): compared with the chapter-8 version of this project, CEP
        // handles heavily out-of-order data better. With a live stream (rather
        // than a bounded file), events arriving after the watermark has passed
        // may still be dropped once the within-window closes.

        // BUGFIX: previously the exception from execute() was swallowed with
        // printStackTrace(); let it propagate so a failed job submission is
        // visible to the caller/runner.
        env.execute();
    }
}
