package com.atguigu.gmall.realtime.app.marketing.app;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.PropertyPreFilter;
import com.alibaba.fastjson.serializer.SimplePropertyPreFilter;
import com.alibaba.fastjson.support.spring.PropertyPreFilters;
import com.atguigu.gmall.realtime.app.marketing.bean.GmallEvent;
import com.atguigu.gmall.realtime.app.marketing.bean.MarketingMessage;
import com.atguigu.gmall.realtime.app.marketing.constant.MarketingConst;
import com.atguigu.gmall.realtime.app.marketing.func.BaseFilter;
import com.atguigu.gmall.realtime.app.marketing.func.MarketingRuleProcessor;
import com.atguigu.gmall.realtime.app.marketing.func.PushLimiter;
import com.atguigu.gmall.realtime.app.marketing.func.UserActionCounter;
import com.atguigu.gmall.realtime.utils.MyKafkaUtil;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.ArrayList;
import java.util.List;

public class MarketingApp {

    /**
     * Real-time marketing message pipeline.
     *
     * <p>Reads page/start/action events from three Kafka topics, filters and
     * counts per-user actions, applies marketing rules to build messages,
     * throttles repeated pushes per (rule, user), then serializes the messages
     * to JSON and sinks them to the {@code topic_message} Kafka topic.
     */
    public static void main(String[] args) throws Exception {
        // Source topics: PAGE_LOG, START, ACTION.
        List<String> topicList = new ArrayList<>();
        topicList.add(MarketingConst.TOPIC_PAGE);
        topicList.add(MarketingConst.TOPIC_ACTION);
        topicList.add(MarketingConst.TOPIC_START);

        String groupId = "marketing_app";
        FlinkKafkaConsumer<GmallEvent> flinkKafkaConsumer = MyKafkaUtil.getFlinkKafkaConsumer(topicList, groupId);

        // 1. Consume the multiple Kafka topics as a single event stream.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        DataStreamSource<GmallEvent> gmallEventStream = env.addSource(flinkKafkaConsumer);

        // 2. Basic filter to drop events irrelevant to marketing.
        SingleOutputStreamOperator<GmallEvent> filteredStream = gmallEventStream.filter(new BaseFilter());

        // 3. Counter: maintains per-user action counts in Redis.
        //    Layout (see UserActionCounter): hash key "user_action:uid:<uid>",
        //    field = action type (order / pay / page / ...), value = count.
        //    Read via hgetall (all actions of a user); writes increment one
        //    action's count. TTL = next midnight minus the current time.
        SingleOutputStreamOperator<GmallEvent> countedEventStream = filteredStream.map(new UserActionCounter());

        // 4. Rule processor:
        //    4.1 fetches rules and audience segments,
        //    4.2 matches events against them,
        //    4.3 builds the marketing messages.
        SingleOutputStreamOperator<MarketingMessage> processStream = countedEventStream.process(new MarketingRuleProcessor());

        // 5. Push throttling: once a message is sent, a per-(ruleId, uid)
        //    "message lock" suppresses further sends of that rule to that user
        //    until the lock expires. Keyed by (ruleId, uid) so the lock state
        //    is local to each key.
        //    NOTE: the anonymous KeySelector is kept on purpose — a lambda
        //    here would lose the Tuple2 generic type info to erasure and fail
        //    Flink's type extraction at job-graph build time.
        KeyedStream<MarketingMessage, Tuple2<String, String>> keyedMessageStream = processStream.keyBy(new KeySelector<MarketingMessage, Tuple2<String, String>>() {

            @Override
            public Tuple2<String, String> getKey(MarketingMessage marketingMessage) throws Exception {
                return Tuple2.of(marketingMessage.getRuleId(), marketingMessage.getUid());
            }
        });

        SingleOutputStreamOperator<MarketingMessage> limitedStream = keyedMessageStream.process(new PushLimiter());

        // 6. Serialize to JSON, excluding internal throttle bookkeeping fields
        //    that downstream consumers must not see.
        SingleOutputStreamOperator<String> messageStream = limitedStream.map(new MapFunction<MarketingMessage, String>() {
            @Override
            public String map(MarketingMessage marketingMessage) throws Exception {
                // Built per record: the fastjson filter is not guaranteed
                // serializable, so it cannot safely be a captured field of
                // this (serialized) MapFunction.
                PropertyPreFilters propertyPreFilters = new PropertyPreFilters();
                PropertyPreFilters.MySimplePropertyPreFilter filter =
                        propertyPreFilters.addFilter().addExcludes("ruleId", "limitTimeUnit", "limitTimeNum");
                return JSON.toJSONString(marketingMessage, filter);
            }
        });

        messageStream.addSink(MyKafkaUtil.getKafkaProducer("topic_message"));

        // Named job so it is identifiable in the Flink web UI.
        env.execute("marketing_app");
    }
}
