package com.camemax.kafkatest;

import com.camemax.pojo.ItemViewCount;
import com.camemax.pojo.UserBehavior;
import com.camemax.utils.HotItemOperators;
import com.camemax.utils.ReadFileUtils;
import com.camemax.utils.StreamEnvUtils;
import org.apache.flink.api.common.eventtime.SerializableTimestampAssigner;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.WindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.util.Collector;


import java.io.IOException;
import java.util.Properties;

/**
 * Demo job: replays a CSV of user-behavior events into a Kafka topic, then
 * consumes that topic with Flink to compute the hot (top-N) items per sliding
 * event-time window.
 *
 * <p>Pipeline: Kafka source → parse CSV → assign event-time timestamps/watermarks
 * → filter "pv" events → keyBy itemId → 1-hour window sliding every 5 minutes
 * → incremental aggregate + window function → per-window top-5 ranking → print.
 */
public class KafkaReadFileHotItems {
    public static void main(String[] args) {

        String topic = "buffer-read";
        String groupId = "test";
        Properties props = new Properties();
        props.setProperty("bootstrap.servers", "172.28.40.190:9092");
        props.setProperty("group.id", groupId);
        // NOTE(review): the serializer settings are consumed by the Kafka producer
        // inside readFile2Kafka; the Flink consumer below deserializes through
        // SimpleStringSchema and ignores them.
        props.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // Producer side: stream the CSV file into the Kafka topic.
        new Thread(() -> {
            try {
                ReadFileUtils.readFile2Kafka("E:\\Javas\\BigData-Projects\\Data\\UserBehavior.csv", props, topic);
            } catch (IOException e) {
                // Demo-only handling; a real job should fail fast or retry.
                e.printStackTrace();
            }
        }, "csv-to-kafka-producer").start();

        // Consumer side: run the Flink streaming job.
        new Thread(() -> {
            try {
                StreamExecutionEnvironment streamEnv = StreamEnvUtils.getStreamEnv(1);
                DataStream<String> kafkaDataStream =
                        streamEnv.addSource(new FlinkKafkaConsumer<>(topic, new SimpleStringSchema(), props));

                // Parse CSV lines into UserBehavior and assign event-time timestamps.
                // FIX: the original used WatermarkStrategy.noWatermarks(), which never
                // advances event time, so the event-time windows below could never fire
                // and the job produced no output. The UserBehavior dataset is ordered by
                // timestamp, so monotonous timestamps are appropriate here — if the
                // source can be out of order, switch to forBoundedOutOfOrderness.
                DataStream<UserBehavior> watermarkDataStream = kafkaDataStream.map(line -> {
                    String[] fields = line.split(",");
                    return new UserBehavior(Long.valueOf(fields[0]), Long.valueOf(fields[1]),
                            Integer.valueOf(fields[2]), fields[3], Long.valueOf(fields[4]));
                }).assignTimestampsAndWatermarks(
                        WatermarkStrategy.<UserBehavior>forMonotonousTimestamps()
                                // source timestamps are in seconds; Flink expects epoch millis
                                .withTimestampAssigner((SerializableTimestampAssigner<UserBehavior>)
                                        (element, recordTimestamp) -> element.getTimestamp() * 1000L)
                );

                // Keep only page-view ("pv") events, key by item, and count each item
                // per sliding window: 1-hour windows advancing every 5 minutes.
                DataStream<ItemViewCount> windowAggregateStream = watermarkDataStream
                        .filter(data -> "pv".equals(data.getBehavior()))
                        .keyBy(UserBehavior::getItemId)
                        .window(SlidingEventTimeWindows.of(Time.hours(1), Time.minutes(5)))
                        // incremental pre-aggregation + window function attaching window metadata
                        .aggregate(new HotItemOperators.UserBehaviorAggregateFunctionImpl(),
                                new HotItemOperators.ItemViewCountWindowFunctionImpl());

                // Group the per-item counts by their window timestamp and emit the
                // top-5 ranking for each window.
                SingleOutputStreamOperator<String> resultDataStream = windowAggregateStream
                        .keyBy(ItemViewCount::getTimstamp) // NOTE(review): getter name is a typo in the POJO ("Timstamp")
                        .process(new HotItemOperators.ItemViewCountKeyedProcessFunctionImpl(5));

                resultDataStream.print();

                streamEnv.execute("KafkaReadFileHotItems");
            } catch (Exception e) {
                // Demo-only handling; production code should surface the failure.
                e.printStackTrace();
            }
        }, "flink-hot-items-job").start();
    }

}
