package com.wuwangfu.ttl;

import com.wuwangfu.entity.ActivityBean;
import com.wuwangfu.func.ActivityCountFunc;
import com.wuwangfu.utils.FlinkUtils;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.functions.ProcessFunction;
import org.apache.flink.util.Collector;

/**
 * @Description：布隆过滤器结合TTL
 * @Author：jcshen
 * @Date：2023-07-04
 *
 * NOTE(review): the two lines below describe a different job (top-N hot items over a
 * 10-minute sliding EventTime window) and appear to be copy-pasted from another class;
 * the actual requirement for THIS class is stated under "需求说明" below — confirm and remove.
 * 统计最新10分钟的热门商品，1分钟统计一次（按照EventTime划分窗口，窗口长度为10分钟，1分钟滑动一次）
 * 将数据进行增量聚合，窗口触发后还要排序（reduce,WindowFunction），在WindowFunction中使用onTimer进行排序
 *
 * 测试数据
 *  2021-01-24 15:00:01,user1,act01,view
 * 2021-01-24 15:00:02,user1,act01,view
 * 2021-01-24 15:00:05,user1,act01,join
 * 2021-01-24 15:00:02,user2,act01,view
 * 2021-01-24 15:00:05,user2,act01,join
 * 2021-01-24 15:00:02,user3,act02,view
 * 2021-01-24 15:00:05,user3,act02,join
 * 2021-01-24 16:00:02,user3,act01,view
 * 2021-01-24 16:00:05,user3,act01,join
 *
 * 需求说明
 *  需求：按天、按小时、统计各个活动、不同事件的次数和人数
 *
 * 不要再使用HashSet作为去重的集合了，而是使用布隆过滤器
 *
 * 2021-01-24,act01,view,4,3
 * 2021-01-24,act01,join,3,3
 * 2021-01-24,act02,view,1,1
 * 2021-01-24,act02,join,1,1
 *
 * 按照小时：
 * 2021-01-24 15,act01,view,3,2
 * 2021-01-24 16,act01,view,1,1
 *
 * State 设置 TTL
 *
 *
 */
public class AdvActivityCount {

    /**
     * Entry point of the Flink job.
     *
     * <p>Pipeline: Kafka source (CSV lines) → parse into {@link ActivityBean} →
     * key by (uid, eid) → {@link ActivityCountFunc} (keyed state with TTL /
     * Bloom-filter dedup, per the header notes) → print sink.
     *
     * @param args args[0] is the path to a properties file consumed by
     *             {@link ParameterTool#fromPropertiesFile(String)}
     * @throws Exception propagated from Flink job submission/execution
     */
    public static void main(String[] args) throws Exception {

        // Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException
        // when the config file path is missing.
        if (args.length < 1) {
            throw new IllegalArgumentException(
                    "Usage: AdvActivityCount <path-to-config.properties>");
        }

        ParameterTool parameter = ParameterTool.fromPropertiesFile(args[0]);
        DataStream<String> lines = FlinkUtils.createKafkaStream(parameter, SimpleStringSchema.class);

        // Parse each CSV record "time,uid,aid,eid" into an ActivityBean.
        // Malformed records (fewer than 4 fields) are dropped instead of
        // crashing the job with an ArrayIndexOutOfBoundsException, which
        // would otherwise trigger an endless restart loop on one bad message.
        SingleOutputStreamOperator<ActivityBean> beanStream = lines.process(new ProcessFunction<String, ActivityBean>() {
            @Override
            public void processElement(String value, ProcessFunction<String, ActivityBean>.Context ctx, Collector<ActivityBean> out) throws Exception {
                String[] fields = value.split(",");
                if (fields.length < 4) {
                    return; // skip malformed record
                }
                String time = fields[0];
                String uid = fields[1];
                String aid = fields[2];
                String eid = fields[3];

                ActivityBean bean = ActivityBean.of(time, uid, aid, eid);
                out.collect(bean);
            }
        });

        // Key by (uid, eid). An anonymous KeySelector (not a lambda) is used on
        // purpose: with a lambda, type erasure would prevent Flink from extracting
        // the Tuple2<String, String> key type.
        KeyedStream<ActivityBean, Tuple2<String, String>> keyedStream = beanStream
                .keyBy(new KeySelector<ActivityBean, Tuple2<String, String>>() {
            @Override
            public Tuple2<String, String> getKey(ActivityBean value) throws Exception {
                return Tuple2.of(value.getUid(), value.getEid());
            }
        });

        // Per-key counting with TTL-backed keyed state (see ActivityCountFunc).
        SingleOutputStreamOperator<ActivityBean> result = keyedStream.process(new ActivityCountFunc());

        result.print();

        FlinkUtils.env.execute();
    }
}
