package com.study.flink.java.day03_windows;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.*;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.timestamps.BoundedOutOfOrdernessTimestampExtractor;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.Properties;

// Event-time tumbling window (NOTE: the original comment said "session window",
// but this job uses TumblingEventTimeWindows).
// KafkaSource -> parallel source:
// when using a parallel source, create the Kafka topic with multiple partitions.
// Every source partition must satisfy the trigger condition (its watermark must
// pass the window end) before the whole window fires its computation.
public class KafkaSourceEventTimeTumblingWindow {

    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Use event time (the timestamp carried inside each record) as the time standard.
        // NOTE(review): watermarks only advance while data keeps arriving.
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);

        // The Kafka topic has 3 partitions, so the consumer runs as 3 parallel sources.
        Properties consumerProps = new Properties();
        consumerProps.setProperty("bootstrap.servers", "node02:9092"); // Kafka broker address
        consumerProps.setProperty("group.id", "gid-wc10");             // consumer group id
        consumerProps.setProperty("auto.offset.reset", "earliest");    // no committed offset yet -> read from the beginning
        //consumerProps.setProperty("enable.auto.commit", "false");    // offset auto-commit

        // Sample records:
        //   1582594382000,spark,3
        //   1582594383000,hadoop,2
        // With a parallel Kafka source, every subtask's watermark must pass the
        // window end before the window fires.
        FlinkKafkaConsumer<String> kafkaConsumer =
                new FlinkKafkaConsumer<>("wc10", new SimpleStringSchema(), consumerProps);

        // Extract the event timestamp (first CSV field, epoch millis) and allow
        // zero out-of-orderness.
        DataStream<String> lines = env
                .addSource(kafkaConsumer)
                .assignTimestampsAndWatermarks(
                        new BoundedOutOfOrdernessTimestampExtractor<String>(Time.seconds(0)) {
                            @Override
                            public long extractTimestamp(String record) {
                                return Long.parseLong(record.split(",")[0]);
                            }
                        });

        // Turn each CSV line into a (word, count) tuple.
        SingleOutputStreamOperator<Tuple2<String, Integer>> wordAndCount =
                lines.map(new MapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public Tuple2<String, Integer> map(String record) throws Exception {
                        String[] parts = record.split(",");
                        return Tuple2.of(parts[1], Integer.parseInt(parts[2]));
                    }
                });

        // Key by the word first, then assign windows on the keyed stream.
        KeyedStream<Tuple2<String, Integer>, Tuple> keyed = wordAndCount.keyBy(0);

        // 5-second event-time tumbling window; note that window boundaries are
        // automatically aligned (to the epoch), not to the first element.
        //WindowedStream<Tuple2<String, Integer>, Tuple, TimeWindow> windowed = keyed.timeWindow(Time.seconds(5));
        WindowedStream<Tuple2<String, Integer>, Tuple, TimeWindow> windowed =
                keyed.window(TumblingEventTimeWindows.of(Time.seconds(5)));

        // Aggregate the counts (tuple field 1) inside each window.
        SingleOutputStreamOperator<Tuple2<String, Integer>> summed = windowed.sum(1);

        summed.print();

        env.execute("KafkaSourceEventTimeTumblingWindow-java");
    }
}
