package cn._51doit.live.jobs;

import cn._51doit.live.deserializer.MyKafkaDeserializationSchema;
import cn._51doit.live.pojo.DataBean;
import cn._51doit.live.udf.JsonToBeanFunctionV2;
import cn._51doit.live.utils.Constants;
import cn._51doit.live.utils.FlinkUtils;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;

/**
 * Streaming job that computes, per live-stream anchor, the current online
 * viewer count and the cumulative (total) viewer count.
 *
 * <p>Writing one row to MySQL per input record would put heavy pressure on the
 * database. Possible optimization: pre-aggregate within windows before sinking.
 */
public class OnlineUserCount {

    public static void main(String[] args) throws Exception {

        // args[0]: path to a .properties file with Kafka / Flink settings
        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);

        // (record id, raw JSON line) pairs read from Kafka
        DataStream<Tuple2<String, String>> lineStreamWithId = FlinkUtils.createKafkaStreamWithId(parameterTool, MyKafkaDeserializationSchema.class);

        // Parse each JSON line into a DataBean
        SingleOutputStreamOperator<DataBean> beanStream = lineStreamWithId.process(new JsonToBeanFunctionV2());

        // Keep only live-room enter/leave events that actually carry an anchor_id.
        // The null checks prevent a NullPointerException (and a job failure) on
        // malformed events missing the properties map or the anchor_id field.
        SingleOutputStreamOperator<DataBean> filtered = beanStream.filter(bean ->
                (Constants.LIVE_ENTER.equals(bean.getEventId()) || Constants.LIVE_LEAVE.equals(bean.getEventId()))
                        && bean.getProperties() != null
                        && bean.getProperties().get("anchor_id") != null);

        filtered.map(new MapFunction<DataBean, Tuple2<String, Integer>>() {

            /**
             * Maps an enter/leave event to (anchorId, delta): +1 for an
             * enter event, -1 for a leave event.
             */
            @Override
            public Tuple2<String, Integer> map(DataBean bean) throws Exception {
                String anchorId = bean.getProperties().get("anchor_id").toString();
                int flag = Constants.LIVE_ENTER.equals(bean.getEventId()) ? 1 : -1;
                return Tuple2.of(anchorId, flag);
            }
        }).keyBy(t -> t.f0).process(new KeyedProcessFunction<String, Tuple2<String, Integer>, Tuple3<String, Integer, Integer>>() {

            // Cumulative number of enter events for this anchor (never decremented)
            private transient ValueState<Integer> totalCountState;
            // Current number of viewers in the room (enters minus leaves)
            private transient ValueState<Integer> onlineCountState;

            @Override
            public void open(Configuration parameters) throws Exception {
                ValueStateDescriptor<Integer> totalStateDescriptor = new ValueStateDescriptor<>("total-count", Integer.class);
                totalCountState = getRuntimeContext().getState(totalStateDescriptor);
                ValueStateDescriptor<Integer> onlineStateDescriptor = new ValueStateDescriptor<>("online-count", Integer.class);
                onlineCountState = getRuntimeContext().getState(onlineStateDescriptor);
            }

            @Override
            public void processElement(Tuple2<String, Integer> value, Context ctx, Collector<Tuple3<String, Integer, Integer>> out) throws Exception {

                // +1 = enter, -1 = leave
                int flag = value.f1;

                // State is null on the first event for this key; treat as 0.
                Integer totalCount = totalCountState.value();
                if (totalCount == null) {
                    totalCount = 0;
                }

                Integer onlineCount = onlineCountState.value();
                if (onlineCount == null) {
                    onlineCount = 0;
                }

                if (flag == 1) {
                    // Enter: bump both counters
                    totalCount += 1;
                    onlineCount += 1;
                } else {
                    // Leave: only the online count drops
                    onlineCount -= 1;
                }

                // Guard against a negative online count, which can occur when
                // leave events arrive for enters that predate the job's state
                // (e.g. after a restart without a savepoint).
                if (onlineCount < 0) {
                    onlineCount = 0;
                }

                // Persist the updated counters
                totalCountState.update(totalCount);
                onlineCountState.update(onlineCount);

                out.collect(Tuple3.of(value.f0, totalCount, onlineCount));
            }
        }).print();

        // TODO: replace print() with a MySQL sink (ideally window-aggregated first)

        FlinkUtils.env.execute();
    }

}
