from pyflink.common import SimpleStringSchema, WatermarkStrategy, Time, Types, Duration
from pyflink.common.watermark_strategy import TimestampAssigner
from pyflink.datastream import StreamExecutionEnvironment
from pyflink.datastream.connectors import DeliveryGuarantee
from pyflink.datastream.connectors.kafka import KafkaSource, KafkaOffsetsInitializer, KafkaSink, \
    KafkaRecordSerializationSchema
import json

from pyflink.datastream.window import SlidingProcessingTimeWindows, TumblingProcessingTimeWindows

# 1. Create the Flink stream execution environment
env = StreamExecutionEnvironment.get_execution_environment()

# Parallelism 1 so all records flow through a single subtask
# (printed window results appear as one ordered stream).
env.set_parallelism(1)

"""
java,1760152838000
java,1760152839000
java,1760152840000
java,1760152841000
java,1760152842000
java,1760152846000
java,1760152844000
java,1760152850000
java,1760152855000
"""

# 2. Read data: Kafka source for the "event_time" topic.
#    - starts from the latest offsets (only new messages are consumed)
#    - deserializes only the record value, as a plain string
#    Each message is expected in "word,epoch_millis" form (see sample
#    data above and map_fun below).
source = KafkaSource.builder() \
    .set_bootstrap_servers("master:9092") \
    .set_topics("event_time") \
    .set_group_id("my-group") \
    .set_starting_offsets(KafkaOffsetsInitializer.latest()) \
    .set_value_only_deserializer(SimpleStringSchema()) \
    .build()

# No watermarks: this job windows on processing time, so event-time
# watermarks are not needed.
lines_ds = env.from_source(source, WatermarkStrategy.no_watermarks(), "Kafka Source")


# 3. Parse each raw message
def map_fun(line):
    """Parse a "word,epoch_millis" message into a (word, timestamp) tuple.

    Only the first two comma-separated fields are used; the timestamp
    is converted to int.
    """
    fields = line.split(",")
    return fields[0], int(fields[1])


words_ts_ds = lines_ds.map(map_fun)


# Window assigners available in the DataStream API:
#   TumblingEventTimeWindows      - tumbling window, event time
#   TumblingProcessingTimeWindows - tumbling window, processing time
#   SlidingEventTimeWindows       - sliding window, event time
#   SlidingProcessingTimeWindows  - sliding window, processing time

# Count occurrences of each word within 5-second tumbling
# processing-time windows, then print the per-window totals.
ones = words_ts_ds.map(lambda pair: (pair[0], 1))
keyed = ones.key_by(lambda pair: pair[0])
windowed = keyed.window(TumblingProcessingTimeWindows.of(Time.seconds(5)))
counts = windowed.reduce(lambda acc, cur: (acc[0], acc[1] + cur[1]))
counts.print()

env.execute()
