package com.zyh.rcm.pvuv;

import com.zyh.rcm.entity.PageViewResult;
import com.zyh.rcm.entity.UserBehaviorEvent;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
import org.apache.flink.streaming.api.functions.windowing.ProcessAllWindowFunction;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerConfig;

import java.io.IOException;
import java.util.Properties;

/**
 * Streaming PV (page view) job: consumes user-behavior JSON events from Kafka,
 * counts events per 10-second tumbling event-time window, and prints each
 * window's total together with its start/end bounds.
 */
public class PVJob1 {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Switch from the default processing time to event time semantics.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // Kafka source configuration: broker address and consumer group.
        Properties kafkaProps = new Properties();
        kafkaProps.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka24:9092");
        kafkaProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "flink");

        // Custom schema deserializes the Kafka payload straight into POJOs.
        String topicName = "topic-pv-uv";
        FlinkKafkaConsumer<UserBehaviorEvent> consumer =
                new FlinkKafkaConsumer<>(topicName, new UserBehaviorEventDeserialization(), kafkaProps);
        // Assign timestamps/watermarks on the consumer itself — the earliest
        // possible point — allowing 2 seconds of out-of-orderness.
        consumer.assignTimestampsAndWatermarks(new UserBehaviorEventWaterMarkerAssigner(2000));

        DataStreamSource<UserBehaviorEvent> events = env.addSource(consumer);

        SingleOutputStreamOperator<PageViewResult> pvPerWindow = events
                .windowAll(TumblingEventTimeWindows.of(Time.seconds(10)))
                .aggregate(
                        // Incremental pre-aggregation: the accumulator is just a count.
                        new AggregateFunction<UserBehaviorEvent, Integer, Integer>() {
                            @Override
                            public Integer createAccumulator() {
                                return 0;
                            }

                            @Override
                            public Integer add(UserBehaviorEvent event, Integer acc) {
                                return acc + 1;
                            }

                            @Override
                            public Integer getResult(Integer acc) {
                                return acc;
                            }

                            @Override
                            public Integer merge(Integer left, Integer right) {
                                return left + right;
                            }
                        },
                        // Attach the window's start/end to the final count.
                        new ProcessAllWindowFunction<Integer, PageViewResult, TimeWindow>() {
                            @Override
                            public void process(Context ctx, Iterable<Integer> counts, Collector<PageViewResult> out) throws Exception {
                                TimeWindow window = ctx.window();
                                // The pre-aggregator emits exactly one count per window.
                                Integer total = counts.iterator().next();
                                out.collect(new PageViewResult(window.getStart(), window.getEnd(), total));
                            }
                        });

        pvPerWindow.print();

        env.execute("pvjob");
    }
}


/**
 * Periodic watermark assigner tolerating bounded out-of-orderness: the emitted
 * watermark always trails the highest event timestamp seen so far by
 * {@code allowLateTime} milliseconds.
 */
class UserBehaviorEventWaterMarkerAssigner implements AssignerWithPeriodicWatermarks<UserBehaviorEvent> {
    // Maximum allowed lateness (out-of-orderness) in milliseconds.
    private final long allowLateTime;
    // Highest event timestamp observed so far. Initialized to
    // Long.MIN_VALUE + allowLateTime so the first watermark is Long.MIN_VALUE
    // rather than -allowLateTime, i.e. event time does not advance before any
    // data has actually been seen.
    private long maxEventTime;

    public UserBehaviorEventWaterMarkerAssigner(long allowLateTime) {
        this.allowLateTime = allowLateTime;
        this.maxEventTime = Long.MIN_VALUE + allowLateTime;
    }

    @Override
    public Watermark getCurrentWatermark() {
        return new Watermark(maxEventTime - allowLateTime);
    }

    @Override
    public long extractTimestamp(UserBehaviorEvent userBehaviorEvent, long previousElementTimestamp) {
        // Use the primitive to avoid needless boxing.
        // NOTE(review): assumes getTs() never returns null — a null would NPE here; confirm upstream.
        long timestamp = userBehaviorEvent.getTs();
        if (timestamp > maxEventTime) {
            maxEventTime = timestamp;
        }
        // Removed the per-event System.out.println of the watermark: printing on
        // every record is a severe throughput drain in production.
        return timestamp;
    }
}

/**
 * Deserializes Kafka record values (UTF-8 JSON) into {@link UserBehaviorEvent} POJOs.
 */
class UserBehaviorEventDeserialization implements DeserializationSchema<UserBehaviorEvent> {
    // DeserializationSchema is Serializable (it ships to the task managers),
    // but ObjectMapper is not — a non-transient field would fail job submission
    // with NotSerializableException. Keep it transient and re-create it lazily
    // on the worker after deserialization.
    private transient ObjectMapper objectMapper;

    @Override
    public UserBehaviorEvent deserialize(byte[] message) throws IOException {
        if (objectMapper == null) {
            objectMapper = new ObjectMapper();
        }
        // Jackson parses the raw bytes directly (JSON is UTF-8 by default),
        // avoiding an intermediate String copy and the charset-name lookup.
        return objectMapper.readValue(message, UserBehaviorEvent.class);
    }

    @Override
    public boolean isEndOfStream(UserBehaviorEvent nextElement) {
        // Unbounded stream: never stop consuming.
        return false;
    }

    @Override
    public TypeInformation<UserBehaviorEvent> getProducedType() {
        return Types.POJO(UserBehaviorEvent.class);
    }
}