package com.study.ProcessFunction;

import com.study.entity.CountWithTimestamp;
import com.study.entity.UserBehavior;
import com.study.utils.JsonHelper;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010;
import org.apache.flink.util.Collector;

import java.util.Date;
import java.util.Properties;

/**
 * @author sjw
 * @className SoureKafkaTimerProcess
 * @date 2020/11/20 8:13
 * @description: Consumes UserBehavior JSON events from Kafka, keys them by userId,
 *               and uses a KeyedProcessFunction with event-time timers to emit a
 *               (key, count, lastModified) tuple for users idle for 9 seconds.
 */
public class SoureKafkaTimerProcess {

    /** Event-time delay (ms) between an element's timestamp and its idle-timeout timer. */
    private static final long TIMEOUT_MS = 9000L;

    /**
     * Reads JSON-encoded {@link UserBehavior} events from Kafka topic {@code logEvent},
     * keys the stream by {@code userId}, counts events per key in keyed state, and
     * registers an event-time timer {@value #TIMEOUT_MS} ms after each element. When a
     * timer fires and the key's state has not been refreshed since it was registered,
     * the current (key, count, lastModified) tuple is emitted downstream.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to build or execute
     */
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.10.200:9092");
        props.put("zookeeper.connect", "192.168.10.200:2181");
        props.put("group.id", "metric-group");
        // key / value deserializers for the Kafka consumer
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("auto.offset.reset", "latest");

        DataStreamSource<String> dataStreamSource = env.addSource(new FlinkKafkaConsumer010<String>(
                "logEvent", new SimpleStringSchema(), props)).setParallelism(1);

        dataStreamSource
                // Parse the JSON payload exactly once. (The previous version parsed it
                // twice: once in a String-typed timestamp extractor, once in the map.)
                .map(new MapFunction<String, UserBehavior>() {
                    @Override
                    public UserBehavior map(String s) throws Exception {
                        return JsonHelper.jsonToObject(s, UserBehavior.class);
                    }
                })
                // Watermarks with zero allowed out-of-orderness, driven by the
                // event's own timestamp field.
                .assignTimestampsAndWatermarks(new BoundedOutOfOrdernessTimestampExtractor<UserBehavior>(Time.milliseconds(0)) {
                    @Override
                    public long extractTimestamp(UserBehavior element) {
                        return element.timestamp;
                    }
                })
                .keyBy("userId")
                .process(new KeyedProcessFunction<Tuple, UserBehavior, Object>() {

                    // Per-key state: running count plus the event time of the last update.
                    private ValueState<CountWithTimestamp> state;

                    @Override
                    public void open(Configuration parameters) throws Exception {
                        super.open(parameters);
                        // Register the keyed state handle once per operator instance.
                        ValueStateDescriptor<CountWithTimestamp> itemsStateDesc = new ValueStateDescriptor<>(
                                "itemState-state",
                                CountWithTimestamp.class);
                        state = getRuntimeContext().getState(itemsStateDesc);
                    }

                    @Override
                    public void processElement(
                            UserBehavior input,
                            Context context,
                            Collector<Object> collector) throws Exception {
                        CountWithTimestamp current = state.value();
                        System.out.println(JsonHelper.objectToJson(input));
                        if (current == null) {
                            // First element seen for this key: initialize the state.
                            current = new CountWithTimestamp();
                            current.key = input.userId;
                            System.out.println("--init state--" + input.userId);
                        }
                        System.out.println("进入事件时间为：" + new Date(context.timestamp()));

                        System.out.println(" processElement " + context.getCurrentKey() + "  " + input.userId + "   " + current.key);
                        System.out.println(" processElement " + context.timestamp() + "  " + input.timestamp + "   ");
                        current.count++;
                        current.lastModified = context.timestamp();
                        // Write the updated count/timestamp back into keyed state.
                        state.update(current);
                        // Fire TIMEOUT_MS after this element's event time; a later
                        // element for the same key moves lastModified forward so this
                        // timer becomes a no-op when it fires (checked in onTimer).
                        context.timerService().registerEventTimeTimer(current.lastModified + TIMEOUT_MS);
                        System.out.println("定时触发时间为：" + new Date(current.lastModified + TIMEOUT_MS));
                    }

                    @Override
                    public void onTimer(
                            long timestamp, KeyedProcessFunction<Tuple, UserBehavior, Object>.OnTimerContext ctx, Collector<Object> out) throws Exception {
                        System.out.println("timer  " + timestamp + "  " + new Date(timestamp) + "    key" + ctx.getCurrentKey());
                        CountWithTimestamp countWithTimestamp = state.value();
                        if (countWithTimestamp == null) {
                            // State was cleared (or expired) between timer registration
                            // and firing — nothing to report for this key.
                            return;
                        }
                        System.out.println("state  " + countWithTimestamp.lastModified + "  " + new Date(countWithTimestamp.lastModified) + "   key" + countWithTimestamp.key);
                        if (timestamp >= (countWithTimestamp.lastModified + TIMEOUT_MS)) {
                            // No newer element arrived for this key within the timeout:
                            // the timer is still "current", so emit the snapshot.
                            System.out.println("未更新   " + timestamp + "   " + (countWithTimestamp.lastModified + TIMEOUT_MS) + "   " + JsonHelper.objectToJson(countWithTimestamp));
                            out.collect(new Tuple3<>(countWithTimestamp.key, countWithTimestamp.count, countWithTimestamp.lastModified));
                        } else {
                            // A newer element superseded this timer; skip emission.
                            System.out.println("更新   " + timestamp + "   " + (countWithTimestamp.lastModified + TIMEOUT_MS) + "   " + JsonHelper.objectToJson(countWithTimestamp));
                        }
                    }
                }).print("print-----");

        env.execute("Flink add data source kafak ");
    }


}
