package cn._51doit.live.jobs;

import cn._51doit.live.deserializer.MyKafkaDeserializationSchema;
import cn._51doit.live.pojo.DataBean;
import cn._51doit.live.udf.JsonToBeanFunctionV2;
import cn._51doit.live.utils.Constants;
import cn._51doit.live.utils.FlinkUtils;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.datastream.WindowedStream;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

/**
 * Counts online (concurrent) viewers and cumulative viewers per anchor.
 *
 * Writing one row to MySQL per incoming event would put heavy pressure on MySQL.
 * Optimization: pre-aggregate inside a window first, then merge with history.
 *
 */
public class OnlineUserCountV2 {

    public static void main(String[] args) throws Exception {

        // Fail fast with a clear message instead of an
        // ArrayIndexOutOfBoundsException when the config path is missing.
        if (args.length < 1) {
            throw new IllegalArgumentException(
                    "Usage: OnlineUserCountV2 <path-to-config.properties>");
        }

        ParameterTool parameterTool = ParameterTool.fromPropertiesFile(args[0]);

        // Kafka source: each element is (recordId, rawJsonLine).
        DataStream<Tuple2<String, String>> lineStreamWithId =
                FlinkUtils.createKafkaStreamWithId(parameterTool, MyKafkaDeserializationSchema.class);

        // Parse the raw JSON into DataBean instances.
        SingleOutputStreamOperator<DataBean> beanStream =
                lineStreamWithId.process(new JsonToBeanFunctionV2());

        // Keep only live-enter / live-leave events, and drop malformed events
        // that lack an anchor_id property — without this guard the map below
        // would throw a NullPointerException and fail the whole job.
        SingleOutputStreamOperator<DataBean> filtered = beanStream.filter(bean ->
                (Constants.LIVE_ENTER.equals(bean.getEventId())
                        || Constants.LIVE_LEAVE.equals(bean.getEventId()))
                && bean.getProperties() != null
                && bean.getProperties().get("anchor_id") != null);

        // Map each event to (anchorId, +1/-1): enter increments the online
        // count, leave decrements it. An anonymous MapFunction (rather than a
        // lambda) is used on purpose so Flink can extract the Tuple2 generic
        // types without an explicit .returns(...) hint.
        WindowedStream<Tuple2<String, Integer>, String, TimeWindow> windowedStream =
                filtered.map(new MapFunction<DataBean, Tuple2<String, Integer>>() {

                    @Override
                    public Tuple2<String, Integer> map(DataBean bean) throws Exception {
                        String anchorId = bean.getProperties().get("anchor_id").toString();
                        int flag = Constants.LIVE_ENTER.equals(bean.getEventId()) ? 1 : -1;
                        return Tuple2.of(anchorId, flag);
                    }
                }).keyBy(t -> t.f0).window(TumblingProcessingTimeWindows.of(Time.seconds(1)));

        // TODO(WIP): aggregate inside the tumbling window first (e.g. via
        // reduce/aggregate with a WindowFunction), then merge the per-window
        // result with historical state, instead of writing every event out.
        // windowedStream is intentionally unused until that step is implemented.

        FlinkUtils.env.execute();
    }

}
