package cn._51doit.live.jobs;

import cn._51doit.live.pojo.DataBean;
import cn._51doit.live.udfs.AudienceCountFunction;
import cn._51doit.live.udfs.JsonToBeanFunctionV2;
import cn._51doit.live.utils.EventType;
import cn._51doit.live.utils.FlinkUtils;
import cn._51doit.live.utils.MyKafkaDeserializationSchema;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.KeyedStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;


public class AudienceCount {

    /**
     * Flink streaming job that, per live-stream anchor, computes both the
     * cumulative audience count and the real-time online audience count.
     *
     * <p>Pipeline: Kafka source → JSON-to-DataBean parsing → filter to
     * live-room enter/leave events → key by anchor id → stateful counting in
     * {@link AudienceCountFunction}.
     *
     * @param args args[0] is the path to a properties file consumed by
     *             {@link ParameterTool#fromPropertiesFile(String)}
     * @throws Exception if the job fails to configure or execute
     */
    public static void main(String[] args) throws Exception {

        // Fail fast with a clear message instead of ArrayIndexOutOfBoundsException.
        if (args.length < 1) {
            throw new IllegalArgumentException("Usage: AudienceCount <path-to-properties-file>");
        }

        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);

        // Kafka source producing Tuple2<recordId, jsonPayload>; the id travels
        // alongside the payload so downstream operators can use it.
        DataStream<Tuple2<String, String>> dataStreamWithId =
                FlinkUtils.createKafkaStreamWithId(parameterTool, new MyKafkaDeserializationSchema());

        // Parse the JSON payload into DataBean records.
        SingleOutputStreamOperator<DataBean> dataBeanStreamWithID =
                dataStreamWithId.process(new JsonToBeanFunctionV2());

        // Keep only live-room enter/leave events, and drop records whose
        // properties lack an anchor_id — otherwise the keyBy below would
        // throw a NullPointerException on such records.
        SingleOutputStreamOperator<DataBean> filtered = dataBeanStreamWithID.filter(new FilterFunction<DataBean>() {
            @Override
            public boolean filter(DataBean value) throws Exception {
                String eventId = value.getEventId();
                boolean isLiveEvent =
                        EventType.LIVE_ENTER.equals(eventId) || EventType.LIVE_LEAVE.equals(eventId);
                return isLiveEvent
                        && value.getProperties() != null
                        && value.getProperties().get("anchor_id") != null;
            }
        });

        // Key by anchor id so all events for one anchor reach the same subtask.
        KeyedStream<DataBean, String> keyed =
                filtered.keyBy(bean -> bean.getProperties().get("anchor_id").toString());

        // One KeyedProcessFunction emits both the cumulative audience count and
        // the current online count per anchor.
        // NOTE(review): Tuple3 presumably is <anchorId, cumulativeCount, onlineCount>
        // — confirm against AudienceCountFunction.
        SingleOutputStreamOperator<Tuple3<String, Integer, Integer>> res = keyed.process(new AudienceCountFunction());

        // TODO: replace print() with a MySQL/Redis sink; print is for local debugging only.
        res.print();

        FlinkUtils.env.execute();
    }

}
